blob: 59bc14e72f6cba89f8f438d8cbd1fe0d29ef9ecb [file] [log] [blame]
Leon Clarked91b9f72010-01-27 17:25:45 +00001// Copyright 2010 the V8 project authors. All rights reserved.
Steve Blocka7e24c12009-10-30 11:49:00 +00002// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
Leon Clarkef7060e22010-06-03 12:02:55 +010030#if defined(V8_TARGET_ARCH_ARM)
31
Steve Blocka7e24c12009-10-30 11:49:00 +000032#include "bootstrapper.h"
Kristian Monsen80d68ea2010-09-08 11:05:35 +010033#include "code-stubs.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000034#include "codegen-inl.h"
Steve Blockd0582a62009-12-15 09:54:21 +000035#include "compiler.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000036#include "debug.h"
Steve Block6ded16b2010-05-10 14:33:55 +010037#include "ic-inl.h"
38#include "jsregexp.h"
Ben Murdochb0fe1622011-05-05 13:52:32 +010039#include "jump-target-inl.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000040#include "parser.h"
Steve Block6ded16b2010-05-10 14:33:55 +010041#include "regexp-macro-assembler.h"
42#include "regexp-stack.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000043#include "register-allocator-inl.h"
44#include "runtime.h"
45#include "scopes.h"
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -080046#include "stub-cache.h"
Steve Block6ded16b2010-05-10 14:33:55 +010047#include "virtual-frame-inl.h"
Kristian Monsen25f61362010-05-21 11:50:48 +010048#include "virtual-frame-arm-inl.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000049
50namespace v8 {
51namespace internal {
52
Kristian Monsen25f61362010-05-21 11:50:48 +010053
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010054#define __ ACCESS_MASM(masm_)
Steve Blocka7e24c12009-10-30 11:49:00 +000055
56// -------------------------------------------------------------------------
57// Platform-specific DeferredCode functions.
58
// Deliberately a no-op on ARM: deferred code here runs against a fully
// spilled virtual frame, so there are no live registers to save.
void DeferredCode::SaveRegisters() {
  // On ARM you either have a completely spilled frame or you
  // handle it yourself, but at the moment there's no automation
  // of registers and deferred code.
}
64
65
// Deliberately a no-op on ARM -- counterpart of SaveRegisters() above;
// nothing was saved, so there is nothing to restore.
void DeferredCode::RestoreRegisters() {
}
68
69
70// -------------------------------------------------------------------------
71// Platform-specific RuntimeCallHelper functions.
72
// Called immediately before a runtime call made on behalf of a virtual
// frame.  Only checks the precondition: the frame must already be fully
// spilled, since the runtime call clobbers allocatable registers.
void VirtualFrameRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  frame_state_->frame()->AssertIsSpilled();
}
76
77
// Nothing to restore after the call: the frame was spilled in BeforeCall,
// so no register state needs to be re-established.
void VirtualFrameRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
}
80
81
Ben Murdochb0fe1622011-05-05 13:52:32 +010082void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010083 masm->EnterInternalFrame();
84}
85
86
Ben Murdochb0fe1622011-05-05 13:52:32 +010087void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010088 masm->LeaveInternalFrame();
Steve Blocka7e24c12009-10-30 11:49:00 +000089}
90
91
92// -------------------------------------------------------------------------
93// CodeGenState implementation.
94
// Pushes a new code-generation state onto the owner's state stack,
// remembering the previous state so the destructor can restore it.
CodeGenState::CodeGenState(CodeGenerator* owner)
    : owner_(owner),
      previous_(owner->state()) {
  owner->set_state(this);
}
100
101
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100102ConditionCodeGenState::ConditionCodeGenState(CodeGenerator* owner,
103 JumpTarget* true_target,
104 JumpTarget* false_target)
105 : CodeGenState(owner),
Steve Blocka7e24c12009-10-30 11:49:00 +0000106 true_target_(true_target),
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100107 false_target_(false_target) {
108 owner->set_state(this);
109}
110
111
// Temporarily overrides the recorded type info for |slot|, saving the
// previous value so ~TypeInfoCodeGenState can restore it.
TypeInfoCodeGenState::TypeInfoCodeGenState(CodeGenerator* owner,
                                           Slot* slot,
                                           TypeInfo type_info)
    : CodeGenState(owner),
      slot_(slot) {
  owner->set_state(this);
  old_type_info_ = owner->set_type_info(slot, type_info);
}
120
121
// Pops this state off the owner's state stack.  States must be destroyed
// in strict LIFO order, which the assert enforces.
CodeGenState::~CodeGenState() {
  ASSERT(owner_->state() == this);
  owner_->set_state(previous_);
}
126
127
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100128TypeInfoCodeGenState::~TypeInfoCodeGenState() {
129 owner()->set_type_info(slot_, old_type_info_);
130}
131
Steve Blocka7e24c12009-10-30 11:49:00 +0000132// -------------------------------------------------------------------------
133// CodeGenerator implementation
134
Kristian Monsen50ef84f2010-07-29 15:18:00 +0100135int CodeGenerator::inlined_write_barrier_size_ = -1;
136
Andrei Popescu31002712010-02-23 13:46:05 +0000137CodeGenerator::CodeGenerator(MacroAssembler* masm)
138 : deferred_(8),
Leon Clarke4515c472010-02-03 11:58:03 +0000139 masm_(masm),
Andrei Popescu31002712010-02-23 13:46:05 +0000140 info_(NULL),
Steve Blocka7e24c12009-10-30 11:49:00 +0000141 frame_(NULL),
142 allocator_(NULL),
143 cc_reg_(al),
144 state_(NULL),
Steve Block6ded16b2010-05-10 14:33:55 +0100145 loop_nesting_(0),
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100146 type_info_(NULL),
Steve Block8defd9f2010-07-08 12:39:36 +0100147 function_return_(JumpTarget::BIDIRECTIONAL),
Steve Blocka7e24c12009-10-30 11:49:00 +0000148 function_return_is_shadowed_(false) {
149}
150
151
152// Calling conventions:
153// fp: caller's frame pointer
154// sp: stack pointer
155// r1: called JS function
156// cp: callee's context
157
Andrei Popescu402d9372010-02-26 13:31:12 +0000158void CodeGenerator::Generate(CompilationInfo* info) {
Steve Blockd0582a62009-12-15 09:54:21 +0000159 // Record the position for debugging purposes.
Andrei Popescu31002712010-02-23 13:46:05 +0000160 CodeForFunctionPosition(info->function());
Steve Block6ded16b2010-05-10 14:33:55 +0100161 Comment cmnt(masm_, "[ function compiled by virtual frame code generator");
Steve Blocka7e24c12009-10-30 11:49:00 +0000162
163 // Initialize state.
Andrei Popescu31002712010-02-23 13:46:05 +0000164 info_ = info;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100165
166 int slots = scope()->num_parameters() + scope()->num_stack_slots();
167 ScopedVector<TypeInfo> type_info_array(slots);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100168 for (int i = 0; i < slots; i++) {
169 type_info_array[i] = TypeInfo::Unknown();
170 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100171 type_info_ = &type_info_array;
172
Steve Blocka7e24c12009-10-30 11:49:00 +0000173 ASSERT(allocator_ == NULL);
174 RegisterAllocator register_allocator(this);
175 allocator_ = &register_allocator;
176 ASSERT(frame_ == NULL);
177 frame_ = new VirtualFrame();
178 cc_reg_ = al;
Steve Block6ded16b2010-05-10 14:33:55 +0100179
180 // Adjust for function-level loop nesting.
181 ASSERT_EQ(0, loop_nesting_);
Ben Murdochf87a2032010-10-22 12:50:53 +0100182 loop_nesting_ = info->is_in_loop() ? 1 : 0;
Steve Block6ded16b2010-05-10 14:33:55 +0100183
Steve Blocka7e24c12009-10-30 11:49:00 +0000184 {
185 CodeGenState state(this);
186
187 // Entry:
188 // Stack: receiver, arguments
189 // lr: return address
190 // fp: caller's frame pointer
191 // sp: stack pointer
192 // r1: called JS function
193 // cp: callee's context
194 allocator_->Initialize();
Leon Clarke4515c472010-02-03 11:58:03 +0000195
Steve Blocka7e24c12009-10-30 11:49:00 +0000196#ifdef DEBUG
197 if (strlen(FLAG_stop_at) > 0 &&
Andrei Popescu31002712010-02-23 13:46:05 +0000198 info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000199 frame_->SpillAll();
200 __ stop("stop-at");
201 }
202#endif
203
Iain Merrick75681382010-08-19 15:07:18 +0100204 frame_->Enter();
205 // tos: code slot
Leon Clarke4515c472010-02-03 11:58:03 +0000206
Iain Merrick75681382010-08-19 15:07:18 +0100207 // Allocate space for locals and initialize them. This also checks
208 // for stack overflow.
209 frame_->AllocateStackSlots();
Leon Clarke4515c472010-02-03 11:58:03 +0000210
Iain Merrick75681382010-08-19 15:07:18 +0100211 frame_->AssertIsSpilled();
212 int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
213 if (heap_slots > 0) {
214 // Allocate local context.
215 // Get outer context and create a new context based on it.
216 __ ldr(r0, frame_->Function());
217 frame_->EmitPush(r0);
218 if (heap_slots <= FastNewContextStub::kMaximumSlots) {
219 FastNewContextStub stub(heap_slots);
220 frame_->CallStub(&stub, 1);
221 } else {
222 frame_->CallRuntime(Runtime::kNewContext, 1);
223 }
Leon Clarke4515c472010-02-03 11:58:03 +0000224
225#ifdef DEBUG
Iain Merrick75681382010-08-19 15:07:18 +0100226 JumpTarget verified_true;
227 __ cmp(r0, cp);
228 verified_true.Branch(eq);
229 __ stop("NewContext: r0 is expected to be the same as cp");
230 verified_true.Bind();
Leon Clarke4515c472010-02-03 11:58:03 +0000231#endif
Iain Merrick75681382010-08-19 15:07:18 +0100232 // Update context local.
233 __ str(cp, frame_->Context());
234 }
Leon Clarke4515c472010-02-03 11:58:03 +0000235
Iain Merrick75681382010-08-19 15:07:18 +0100236 // TODO(1241774): Improve this code:
237 // 1) only needed if we have a context
238 // 2) no need to recompute context ptr every single time
239 // 3) don't copy parameter operand code from SlotOperand!
240 {
241 Comment cmnt2(masm_, "[ copy context parameters into .context");
242 // Note that iteration order is relevant here! If we have the same
243 // parameter twice (e.g., function (x, y, x)), and that parameter
244 // needs to be copied into the context, it must be the last argument
245 // passed to the parameter that needs to be copied. This is a rare
246 // case so we don't check for it, instead we rely on the copying
247 // order: such a parameter is copied repeatedly into the same
248 // context location and thus the last value is what is seen inside
249 // the function.
250 frame_->AssertIsSpilled();
251 for (int i = 0; i < scope()->num_parameters(); i++) {
252 Variable* par = scope()->parameter(i);
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100253 Slot* slot = par->AsSlot();
Iain Merrick75681382010-08-19 15:07:18 +0100254 if (slot != NULL && slot->type() == Slot::CONTEXT) {
255 ASSERT(!scope()->is_global_scope()); // No params in global scope.
256 __ ldr(r1, frame_->ParameterAt(i));
257 // Loads r2 with context; used below in RecordWrite.
258 __ str(r1, SlotOperand(slot, r2));
259 // Load the offset into r3.
260 int slot_offset =
261 FixedArray::kHeaderSize + slot->index() * kPointerSize;
262 __ RecordWrite(r2, Operand(slot_offset), r3, r1);
Leon Clarke4515c472010-02-03 11:58:03 +0000263 }
264 }
Iain Merrick75681382010-08-19 15:07:18 +0100265 }
Leon Clarke4515c472010-02-03 11:58:03 +0000266
Iain Merrick75681382010-08-19 15:07:18 +0100267 // Store the arguments object. This must happen after context
268 // initialization because the arguments object may be stored in
269 // the context.
270 if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
271 StoreArgumentsObject(true);
272 }
Leon Clarke4515c472010-02-03 11:58:03 +0000273
Iain Merrick75681382010-08-19 15:07:18 +0100274 // Initialize ThisFunction reference if present.
275 if (scope()->is_function_scope() && scope()->function() != NULL) {
276 frame_->EmitPushRoot(Heap::kTheHoleValueRootIndex);
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100277 StoreToSlot(scope()->function()->AsSlot(), NOT_CONST_INIT);
Leon Clarke4515c472010-02-03 11:58:03 +0000278 }
279
Steve Blocka7e24c12009-10-30 11:49:00 +0000280 // Initialize the function return target after the locals are set
281 // up, because it needs the expected frame height from the frame.
Kristian Monsen25f61362010-05-21 11:50:48 +0100282 function_return_.SetExpectedHeight();
Steve Blocka7e24c12009-10-30 11:49:00 +0000283 function_return_is_shadowed_ = false;
284
Steve Blocka7e24c12009-10-30 11:49:00 +0000285 // Generate code to 'execute' declarations and initialize functions
286 // (source elements). In case of an illegal redeclaration we need to
287 // handle that instead of processing the declarations.
Andrei Popescu31002712010-02-23 13:46:05 +0000288 if (scope()->HasIllegalRedeclaration()) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000289 Comment cmnt(masm_, "[ illegal redeclarations");
Andrei Popescu31002712010-02-23 13:46:05 +0000290 scope()->VisitIllegalRedeclaration(this);
Steve Blocka7e24c12009-10-30 11:49:00 +0000291 } else {
292 Comment cmnt(masm_, "[ declarations");
Andrei Popescu31002712010-02-23 13:46:05 +0000293 ProcessDeclarations(scope()->declarations());
Steve Blocka7e24c12009-10-30 11:49:00 +0000294 // Bail out if a stack-overflow exception occurred when processing
295 // declarations.
296 if (HasStackOverflow()) return;
297 }
298
299 if (FLAG_trace) {
300 frame_->CallRuntime(Runtime::kTraceEnter, 0);
301 // Ignore the return value.
302 }
303
304 // Compile the body of the function in a vanilla state. Don't
305 // bother compiling all the code if the scope has an illegal
306 // redeclaration.
Andrei Popescu31002712010-02-23 13:46:05 +0000307 if (!scope()->HasIllegalRedeclaration()) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000308 Comment cmnt(masm_, "[ function body");
309#ifdef DEBUG
310 bool is_builtin = Bootstrapper::IsActive();
311 bool should_trace =
312 is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
313 if (should_trace) {
314 frame_->CallRuntime(Runtime::kDebugTrace, 0);
315 // Ignore the return value.
316 }
317#endif
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100318 VisitStatements(info->function()->body());
Steve Blocka7e24c12009-10-30 11:49:00 +0000319 }
320 }
321
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100322 // Handle the return from the function.
323 if (has_valid_frame()) {
324 // If there is a valid frame, control flow can fall off the end of
325 // the body. In that case there is an implicit return statement.
326 ASSERT(!function_return_is_shadowed_);
327 frame_->PrepareForReturn();
Steve Blocka7e24c12009-10-30 11:49:00 +0000328 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100329 if (function_return_.is_bound()) {
330 function_return_.Jump();
331 } else {
332 function_return_.Bind();
333 GenerateReturnSequence();
334 }
335 } else if (function_return_.is_linked()) {
336 // If the return target has dangling jumps to it, then we have not
337 // yet generated the return sequence. This can happen when (a)
338 // control does not flow off the end of the body so we did not
339 // compile an artificial return statement just above, and (b) there
340 // are return statements in the body but (c) they are all shadowed.
Steve Blocka7e24c12009-10-30 11:49:00 +0000341 function_return_.Bind();
Kristian Monsen9dcf7e22010-06-28 14:14:28 +0100342 GenerateReturnSequence();
Steve Blocka7e24c12009-10-30 11:49:00 +0000343 }
344
Steve Block6ded16b2010-05-10 14:33:55 +0100345 // Adjust for function-level loop nesting.
Ben Murdochf87a2032010-10-22 12:50:53 +0100346 ASSERT(loop_nesting_ == info->is_in_loop()? 1 : 0);
Steve Block6ded16b2010-05-10 14:33:55 +0100347 loop_nesting_ = 0;
348
Steve Blocka7e24c12009-10-30 11:49:00 +0000349 // Code generation state must be reset.
350 ASSERT(!has_cc());
351 ASSERT(state_ == NULL);
Steve Block6ded16b2010-05-10 14:33:55 +0100352 ASSERT(loop_nesting() == 0);
Steve Blocka7e24c12009-10-30 11:49:00 +0000353 ASSERT(!function_return_is_shadowed_);
354 function_return_.Unuse();
355 DeleteFrame();
356
357 // Process any deferred code using the register allocator.
358 if (!HasStackOverflow()) {
359 ProcessDeferred();
360 }
361
362 allocator_ = NULL;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100363 type_info_ = NULL;
364}
365
366
367int CodeGenerator::NumberOfSlot(Slot* slot) {
368 if (slot == NULL) return kInvalidSlotNumber;
369 switch (slot->type()) {
370 case Slot::PARAMETER:
371 return slot->index();
372 case Slot::LOCAL:
373 return slot->index() + scope()->num_parameters();
374 default:
375 break;
376 }
377 return kInvalidSlotNumber;
Steve Blocka7e24c12009-10-30 11:49:00 +0000378}
379
380
// Returns a MemOperand addressing |slot|.  For parameter/local slots this
// is a frame-relative operand; for context slots it walks the context
// chain (emitting loads, clobbering |tmp|) and returns an operand into
// the resolved function context.
MemOperand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
  // Currently, this assertion will fail if we try to assign to
  // a constant variable that is constant because it is read-only
  // (such as the variable referring to a named function expression).
  // We need to implement assignments to read-only variables.
  // Ideally, we should do this during AST generation (by converting
  // such assignments into expression statements); however, in general
  // we may not be able to make the decision until past AST generation,
  // that is when the entire program is known.
  ASSERT(slot != NULL);
  int index = slot->index();
  switch (slot->type()) {
    case Slot::PARAMETER:
      return frame_->ParameterAt(index);

    case Slot::LOCAL:
      return frame_->LocalAt(index);

    case Slot::CONTEXT: {
      // Follow the context chain if necessary.
      ASSERT(!tmp.is(cp));  // do not overwrite context register
      Register context = cp;
      int chain_length = scope()->ContextChainLength(slot->var()->scope());
      for (int i = 0; i < chain_length; i++) {
        // Load the closure.
        // (All contexts, even 'with' contexts, have a closure,
        // and it is the same for all contexts inside a function.
        // There is no need to go to the function context first.)
        __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
        // Load the function context (which is the incoming, outer context).
        __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
        context = tmp;
      }
      // We may have a 'with' context now. Get the function context.
      // (In fact this mov may never be the needed, since the scope analysis
      // may not permit a direct context access in this case and thus we are
      // always at a function context. However it is safe to dereference be-
      // cause the function context of a function context is itself. Before
      // deleting this mov we should try to create a counter-example first,
      // though...)
      __ ldr(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
      return ContextOperand(tmp, index);
    }

    default:
      UNREACHABLE();
      return MemOperand(r0, 0);
  }
}
430
431
// Like SlotOperand for a CONTEXT slot, but while walking the context
// chain it verifies that no intervening context has an extension object
// (which would make the lookup dynamic); if one is found, control jumps
// to |slow|.  Clobbers |tmp| and |tmp2|.
MemOperand CodeGenerator::ContextSlotOperandCheckExtensions(
    Slot* slot,
    Register tmp,
    Register tmp2,
    JumpTarget* slow) {
  ASSERT(slot->type() == Slot::CONTEXT);
  Register context = cp;

  for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX));
        __ tst(tmp2, tmp2);
        slow->Branch(ne);
      }
      __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
      __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
      context = tmp;
    }
  }
  // Check that last extension is NULL.
  __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX));
  __ tst(tmp2, tmp2);
  slow->Branch(ne);
  __ ldr(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
  return ContextOperand(tmp, slot->index());
}
460
461
462// Loads a value on TOS. If it is a boolean value, the result may have been
463// (partially) translated into branches, or it may have set the condition
464// code register. If force_cc is set, the value is forced to set the
465// condition code register and no value is pushed. If the condition code
466// register was set, has_cc() is true and cc_reg_ contains the condition to
467// test for 'true'.
// Loads a value on TOS. If it is a boolean value, the result may have been
// (partially) translated into branches, or it may have set the condition
// code register. If force_cc is set, the value is forced to set the
// condition code register and no value is pushed. If the condition code
// register was set, has_cc() is true and cc_reg_ contains the condition to
// test for 'true'.
void CodeGenerator::LoadCondition(Expression* x,
                                  JumpTarget* true_target,
                                  JumpTarget* false_target,
                                  bool force_cc) {
  ASSERT(!has_cc());
  int original_height = frame_->height();

  { ConditionCodeGenState new_state(this, true_target, false_target);
    Visit(x);

    // If we hit a stack overflow, we may not have actually visited
    // the expression. In that case, we ensure that we have a
    // valid-looking frame state because we will continue to generate
    // code as we unwind the C++ stack.
    //
    // It's possible to have both a stack overflow and a valid frame
    // state (eg, a subexpression overflowed, visiting it returned
    // with a dummied frame state, and visiting this expression
    // returned with a normal-looking state).
    if (HasStackOverflow() &&
        has_valid_frame() &&
        !has_cc() &&
        frame_->height() == original_height) {
      // Arbitrary target; the code is unreachable but must stay
      // structurally valid.
      true_target->Jump();
    }
  }
  if (force_cc && frame_ != NULL && !has_cc()) {
    // Convert the TOS value to a boolean in the condition code register.
    ToBoolean(true_target, false_target);
  }
  // Postconditions: either a condition code with unchanged height, or a
  // pushed value (height + 1), or no valid frame at all.
  ASSERT(!force_cc || !has_valid_frame() || has_cc());
  ASSERT(!has_valid_frame() ||
         (has_cc() && frame_->height() == original_height) ||
         (!has_cc() && frame_->height() == original_height + 1));
}
503
504
Steve Blockd0582a62009-12-15 09:54:21 +0000505void CodeGenerator::Load(Expression* expr) {
Iain Merrick75681382010-08-19 15:07:18 +0100506 // We generally assume that we are not in a spilled scope for most
507 // of the code generator. A failure to ensure this caused issue 815
508 // and this assert is designed to catch similar issues.
509 frame_->AssertIsNotSpilled();
Steve Blocka7e24c12009-10-30 11:49:00 +0000510#ifdef DEBUG
511 int original_height = frame_->height();
512#endif
513 JumpTarget true_target;
514 JumpTarget false_target;
Steve Blockd0582a62009-12-15 09:54:21 +0000515 LoadCondition(expr, &true_target, &false_target, false);
Steve Blocka7e24c12009-10-30 11:49:00 +0000516
517 if (has_cc()) {
518 // Convert cc_reg_ into a boolean value.
519 JumpTarget loaded;
520 JumpTarget materialize_true;
521 materialize_true.Branch(cc_reg_);
Steve Block8defd9f2010-07-08 12:39:36 +0100522 frame_->EmitPushRoot(Heap::kFalseValueRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +0000523 loaded.Jump();
524 materialize_true.Bind();
Steve Block8defd9f2010-07-08 12:39:36 +0100525 frame_->EmitPushRoot(Heap::kTrueValueRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +0000526 loaded.Bind();
527 cc_reg_ = al;
528 }
529
530 if (true_target.is_linked() || false_target.is_linked()) {
531 // We have at least one condition value that has been "translated"
532 // into a branch, thus it needs to be loaded explicitly.
533 JumpTarget loaded;
534 if (frame_ != NULL) {
535 loaded.Jump(); // Don't lose the current TOS.
536 }
537 bool both = true_target.is_linked() && false_target.is_linked();
538 // Load "true" if necessary.
539 if (true_target.is_linked()) {
540 true_target.Bind();
Steve Block8defd9f2010-07-08 12:39:36 +0100541 frame_->EmitPushRoot(Heap::kTrueValueRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +0000542 }
543 // If both "true" and "false" need to be loaded jump across the code for
544 // "false".
545 if (both) {
546 loaded.Jump();
547 }
548 // Load "false" if necessary.
549 if (false_target.is_linked()) {
550 false_target.Bind();
Steve Block8defd9f2010-07-08 12:39:36 +0100551 frame_->EmitPushRoot(Heap::kFalseValueRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +0000552 }
553 // A value is loaded on all paths reaching this point.
554 loaded.Bind();
555 }
556 ASSERT(has_valid_frame());
557 ASSERT(!has_cc());
Steve Block6ded16b2010-05-10 14:33:55 +0100558 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +0000559}
560
561
// Pushes the global object onto the virtual frame.
void CodeGenerator::LoadGlobal() {
  Register reg = frame_->GetTOSRegister();
  __ ldr(reg, GlobalObjectOperand());
  frame_->EmitPush(reg);
}
567
568
// Pushes the global receiver (the object used as 'this' for calls to
// global functions) onto the virtual frame.  Reached via the current
// context's GLOBAL_INDEX slot.  NOTE(review): |scratch| is unused here;
// the TOS register from the frame is used instead.
void CodeGenerator::LoadGlobalReceiver(Register scratch) {
  Register reg = frame_->GetTOSRegister();
  __ ldr(reg, ContextOperand(cp, Context::GLOBAL_INDEX));
  __ ldr(reg,
         FieldMemOperand(reg, GlobalObject::kGlobalReceiverOffset));
  frame_->EmitPush(reg);
}
576
577
Steve Block6ded16b2010-05-10 14:33:55 +0100578ArgumentsAllocationMode CodeGenerator::ArgumentsMode() {
579 if (scope()->arguments() == NULL) return NO_ARGUMENTS_ALLOCATION;
580 ASSERT(scope()->arguments_shadow() != NULL);
581 // We don't want to do lazy arguments allocation for functions that
582 // have heap-allocated contexts, because it interfers with the
583 // uninitialized const tracking in the context objects.
584 return (scope()->num_heap_slots() > 0)
585 ? EAGER_ARGUMENTS_ALLOCATION
586 : LAZY_ARGUMENTS_ALLOCATION;
587}
588
589
// Stores the arguments object (or, lazily, the hole sentinel) into both
// the 'arguments' slot and its shadow slot.  |initial| is true when
// called from the function prologue; on later calls under lazy
// allocation the store is skipped if the slot was already overwritten
// by a user variable named 'arguments'.
void CodeGenerator::StoreArgumentsObject(bool initial) {
  ArgumentsAllocationMode mode = ArgumentsMode();
  ASSERT(mode != NO_ARGUMENTS_ALLOCATION);

  Comment cmnt(masm_, "[ store arguments object");
  if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) {
    // When using lazy arguments allocation, we store the hole value
    // as a sentinel indicating that the arguments object hasn't been
    // allocated yet.
    frame_->EmitPushRoot(Heap::kTheHoleValueRootIndex);
  } else {
    frame_->SpillAll();
    ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
    __ ldr(r2, frame_->Function());
    // The receiver is below the arguments, the return address, and the
    // frame pointer on the stack.
    const int kReceiverDisplacement = 2 + scope()->num_parameters();
    __ add(r1, fp, Operand(kReceiverDisplacement * kPointerSize));
    __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));
    frame_->Adjust(3);
    __ Push(r2, r1, r0);
    frame_->CallStub(&stub, 3);
    frame_->EmitPush(r0);
  }

  Variable* arguments = scope()->arguments();
  Variable* shadow = scope()->arguments_shadow();
  ASSERT(arguments != NULL && arguments->AsSlot() != NULL);
  ASSERT(shadow != NULL && shadow->AsSlot() != NULL);
  JumpTarget done;
  if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) {
    // We have to skip storing into the arguments slot if it has
    // already been written to. This can happen if the a function
    // has a local variable named 'arguments'.
    LoadFromSlot(scope()->arguments()->AsSlot(), NOT_INSIDE_TYPEOF);
    // NOTE: this Register shadows the Variable* 'arguments' above for
    // the rest of this if-block.
    Register arguments = frame_->PopToRegister();
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(arguments, ip);
    done.Branch(ne);
  }
  StoreToSlot(arguments->AsSlot(), NOT_CONST_INIT);
  if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind();
  StoreToSlot(shadow->AsSlot(), NOT_CONST_INIT);
}
634
635
Steve Blockd0582a62009-12-15 09:54:21 +0000636void CodeGenerator::LoadTypeofExpression(Expression* expr) {
637 // Special handling of identifiers as subexpressions of typeof.
Steve Blockd0582a62009-12-15 09:54:21 +0000638 Variable* variable = expr->AsVariableProxy()->AsVariable();
Steve Blocka7e24c12009-10-30 11:49:00 +0000639 if (variable != NULL && !variable->is_this() && variable->is_global()) {
Steve Blockd0582a62009-12-15 09:54:21 +0000640 // For a global variable we build the property reference
641 // <global>.<variable> and perform a (regular non-contextual) property
642 // load to make sure we do not get reference errors.
Steve Blocka7e24c12009-10-30 11:49:00 +0000643 Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX);
644 Literal key(variable->name());
Steve Blocka7e24c12009-10-30 11:49:00 +0000645 Property property(&global, &key, RelocInfo::kNoPosition);
Steve Blockd0582a62009-12-15 09:54:21 +0000646 Reference ref(this, &property);
Steve Block6ded16b2010-05-10 14:33:55 +0100647 ref.GetValue();
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100648 } else if (variable != NULL && variable->AsSlot() != NULL) {
Steve Blockd0582a62009-12-15 09:54:21 +0000649 // For a variable that rewrites to a slot, we signal it is the immediate
650 // subexpression of a typeof.
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100651 LoadFromSlotCheckForArguments(variable->AsSlot(), INSIDE_TYPEOF);
Steve Blocka7e24c12009-10-30 11:49:00 +0000652 } else {
Steve Blockd0582a62009-12-15 09:54:21 +0000653 // Anything else can be handled normally.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100654 Load(expr);
Steve Blocka7e24c12009-10-30 11:49:00 +0000655 }
656}
657
658
Leon Clarked91b9f72010-01-27 17:25:45 +0000659Reference::Reference(CodeGenerator* cgen,
660 Expression* expression,
661 bool persist_after_get)
662 : cgen_(cgen),
663 expression_(expression),
664 type_(ILLEGAL),
665 persist_after_get_(persist_after_get) {
Iain Merrick75681382010-08-19 15:07:18 +0100666 // We generally assume that we are not in a spilled scope for most
667 // of the code generator. A failure to ensure this caused issue 815
668 // and this assert is designed to catch similar issues.
669 cgen->frame()->AssertIsNotSpilled();
Steve Blocka7e24c12009-10-30 11:49:00 +0000670 cgen->LoadReference(this);
671}
672
673
// A reference must be explicitly unloaded (or never have been valid)
// before it goes out of scope; otherwise frame bookkeeping is broken.
Reference::~Reference() {
  ASSERT(is_unloaded() || is_illegal());
}
677
678
// Classifies |ref|'s expression and pushes its components on the frame:
// NAMED/KEYED for properties (receiver, and key for KEYED), NAMED for
// globals (global object pushed), SLOT for local/context variables
// (nothing pushed).  Anything else becomes a runtime reference error.
void CodeGenerator::LoadReference(Reference* ref) {
  Comment cmnt(masm_, "[ LoadReference");
  Expression* e = ref->expression();
  Property* property = e->AsProperty();
  Variable* var = e->AsVariableProxy()->AsVariable();

  if (property != NULL) {
    // The expression is either a property or a variable proxy that rewrites
    // to a property.
    Load(property->obj());
    if (property->key()->IsPropertyName()) {
      ref->set_type(Reference::NAMED);
    } else {
      Load(property->key());
      ref->set_type(Reference::KEYED);
    }
  } else if (var != NULL) {
    // The expression is a variable proxy that does not rewrite to a
    // property. Global variables are treated as named property references.
    if (var->is_global()) {
      LoadGlobal();
      ref->set_type(Reference::NAMED);
    } else {
      ASSERT(var->AsSlot() != NULL);
      ref->set_type(Reference::SLOT);
    }
  } else {
    // Anything else is a runtime error.
    Load(e);
    frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
  }
}
711
712
713void CodeGenerator::UnloadReference(Reference* ref) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000714 int size = ref->size();
Leon Clarked91b9f72010-01-27 17:25:45 +0000715 ref->set_unloaded();
Steve Block6ded16b2010-05-10 14:33:55 +0100716 if (size == 0) return;
717
718 // Pop a reference from the stack while preserving TOS.
719 VirtualFrame::RegisterAllocationScope scope(this);
720 Comment cmnt(masm_, "[ UnloadReference");
721 if (size > 0) {
722 Register tos = frame_->PopToRegister();
723 frame_->Drop(size);
724 frame_->EmitPush(tos);
725 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000726}
727
728
// ECMA-262, section 9.2, page 30: ToBoolean().  Convert the given
// register to a boolean in the condition code register.  The code
// may jump to 'false_target' in case the register converts to 'false'.
// On exit cc_reg_ is ne, i.e. "truthy" corresponds to the ne condition.
void CodeGenerator::ToBoolean(JumpTarget* true_target,
                              JumpTarget* false_target) {
  // Note: The generated code snippet does not change stack variables.
  //       Only the condition code should be set.
  // If the frame statically knows TOS is a smi, all the oddball and
  // heap-object checks below can be skipped.
  bool known_smi = frame_->KnownSmiAt(0);
  Register tos = frame_->PopToRegister();

  // Fast case checks

  // Check if the value is 'false'.
  if (!known_smi) {
    __ LoadRoot(ip, Heap::kFalseValueRootIndex);
    __ cmp(tos, ip);
    false_target->Branch(eq);

    // Check if the value is 'true'.
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(tos, ip);
    true_target->Branch(eq);

    // Check if the value is 'undefined'.
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(tos, ip);
    false_target->Branch(eq);
  }

  // Check if the value is a smi.  eq here means "is smi zero", the only
  // falsy smi; with known_smi this comparison alone decides the result.
  __ cmp(tos, Operand(Smi::FromInt(0)));

  if (!known_smi) {
    false_target->Branch(eq);
    // Any other smi is truthy.
    __ tst(tos, Operand(kSmiTagMask));
    true_target->Branch(eq);

    // Slow case.
    if (CpuFeatures::IsSupported(VFP3)) {
      CpuFeatures::Scope scope(VFP3);
      // Implements the slow case by using ToBooleanStub.
      // The ToBooleanStub takes a single argument, and
      // returns a non-zero value for true, or zero for false.
      // Both the argument value and the return value use the
      // register assigned to tos_
      ToBooleanStub stub(tos);
      frame_->CallStub(&stub, 0);
      // Convert the result in "tos" to a condition code.
      __ cmp(tos, Operand(0, RelocInfo::NONE));
    } else {
      // Implements slow case by calling the runtime.
      frame_->EmitPush(tos);
      frame_->CallRuntime(Runtime::kToBool, 1);
      // Convert the result (r0) to a condition code.
      __ LoadRoot(ip, Heap::kFalseValueRootIndex);
      __ cmp(r0, ip);
    }
  }

  // In every path above, "true" ends with the flags indicating ne.
  cc_reg_ = ne;
}
790
791
// Emits code for the binary operation |op| on the two values on top of the
// virtual frame (x below y, i.e. x OP y) and pushes the result.  When
// |inline_smi| is set, a fast inline path for smi operands is emitted for
// ADD/SUB and the bitwise ops, falling back to GenericBinaryOpStub
// otherwise.  |constant_rhs| (or kUnknownIntValue) lets the stub specialize
// for a known right operand.
void CodeGenerator::GenericBinaryOperation(Token::Value op,
                                           OverwriteMode overwrite_mode,
                                           GenerateInlineSmi inline_smi,
                                           int constant_rhs) {
  // top of virtual frame: y
  // 2nd elt. on virtual frame : x
  // result : top of virtual frame

  // Stub is entered with a call: 'return address' is in lr.
  switch (op) {
    case Token::ADD:
    case Token::SUB:
      if (inline_smi) {
        JumpTarget done;
        Register rhs = frame_->PopToRegister();
        Register lhs = frame_->PopToRegister(rhs);
        Register scratch = VirtualFrame::scratch0();
        __ orr(scratch, rhs, Operand(lhs));
        // Check they are both small and positive.  If neither operand has
        // its top two bits set (and both have smi tags), the conditional
        // add/sub below cannot overflow.
        __ tst(scratch, Operand(kSmiTagMask | 0xc0000000));
        ASSERT(rhs.is(r0) || lhs.is(r0));  // r0 is free now.
        STATIC_ASSERT(kSmiTag == 0);
        if (op == Token::ADD) {
          __ add(r0, lhs, Operand(rhs), LeaveCC, eq);
        } else {
          __ sub(r0, lhs, Operand(rhs), LeaveCC, eq);
        }
        done.Branch(eq);
        // Slow path: both operands are still in lhs/rhs for the stub.
        GenericBinaryOpStub stub(op, overwrite_mode, lhs, rhs, constant_rhs);
        frame_->SpillAll();
        frame_->CallStub(&stub, 0);
        done.Bind();
        frame_->EmitPush(r0);
        break;
      } else {
        // Fall through!
      }
    case Token::BIT_OR:
    case Token::BIT_AND:
    case Token::BIT_XOR:
      if (inline_smi) {
        bool rhs_is_smi = frame_->KnownSmiAt(0);
        bool lhs_is_smi = frame_->KnownSmiAt(1);
        Register rhs = frame_->PopToRegister();
        Register lhs = frame_->PopToRegister(rhs);
        Register smi_test_reg;
        Condition cond;
        // Only test the tag bits that are not statically known to be smi.
        if (!rhs_is_smi || !lhs_is_smi) {
          if (rhs_is_smi) {
            smi_test_reg = lhs;
          } else if (lhs_is_smi) {
            smi_test_reg = rhs;
          } else {
            smi_test_reg = VirtualFrame::scratch0();
            __ orr(smi_test_reg, rhs, Operand(lhs));
          }
          // Check they are both Smis.
          __ tst(smi_test_reg, Operand(kSmiTagMask));
          cond = eq;
        } else {
          cond = al;  // Both known smi: do the operation unconditionally.
        }
        ASSERT(rhs.is(r0) || lhs.is(r0));  // r0 is free now.
        if (op == Token::BIT_OR) {
          __ orr(r0, lhs, Operand(rhs), LeaveCC, cond);
        } else if (op == Token::BIT_AND) {
          __ and_(r0, lhs, Operand(rhs), LeaveCC, cond);
        } else {
          ASSERT(op == Token::BIT_XOR);
          // Bitwise ops on two smis yield a valid smi directly (tag is 0).
          STATIC_ASSERT(kSmiTag == 0);
          __ eor(r0, lhs, Operand(rhs), LeaveCC, cond);
        }
        if (cond != al) {
          // Slow path for the not-both-smi case.
          JumpTarget done;
          done.Branch(cond);
          GenericBinaryOpStub stub(op, overwrite_mode, lhs, rhs, constant_rhs);
          frame_->SpillAll();
          frame_->CallStub(&stub, 0);
          done.Bind();
        }
        frame_->EmitPush(r0);
        break;
      } else {
        // Fall through!
      }
    case Token::MUL:
    case Token::DIV:
    case Token::MOD:
    case Token::SHL:
    case Token::SHR:
    case Token::SAR: {
      // No inline fast path: always go through the generic stub.
      Register rhs = frame_->PopToRegister();
      Register lhs = frame_->PopToRegister(rhs);  // Don't pop to rhs register.
      GenericBinaryOpStub stub(op, overwrite_mode, lhs, rhs, constant_rhs);
      frame_->SpillAll();
      frame_->CallStub(&stub, 0);
      frame_->EmitPush(r0);
      break;
    }

    case Token::COMMA: {
      Register scratch = frame_->PopToRegister();
      // Simply discard left value.
      frame_->Drop();
      frame_->EmitPush(scratch);
      break;
    }

    default:
      // Other cases should have been handled before this point.
      UNREACHABLE();
      break;
  }
}
906
907
// Deferred (out-of-line) slow path for an inlined binary operation between
// the value in tos_register_ and a constant smi (value_).  The inline fast
// path is emitted by CodeGenerator::SmiOperation; this code reconstitutes
// the operands and calls GenericBinaryOpStub, and for bit operations also
// handles a heap-number input (GenerateNonSmiInput) and a 32-bit answer
// that does not fit in a smi (GenerateAnswerOutOfRange).
class DeferredInlineSmiOperation: public DeferredCode {
 public:
  DeferredInlineSmiOperation(Token::Value op,
                             int value,
                             bool reversed,
                             OverwriteMode overwrite_mode,
                             Register tos)
      : op_(op),
        value_(value),
        reversed_(reversed),
        overwrite_mode_(overwrite_mode),
        tos_register_(tos) {
    set_comment("[ DeferredInlinedSmiOperation");
  }

  virtual void Generate();
  // This stub makes explicit calls to SaveRegisters(), RestoreRegisters() and
  // Exit(). Currently on ARM SaveRegisters() and RestoreRegisters() are empty
  // methods, it is the responsibility of the deferred code to save and restore
  // registers.
  virtual bool AutoSaveAndRestore() { return false; }

  // Conditional entry points used by the inline code (bit operations only).
  void JumpToNonSmiInput(Condition cond);
  void JumpToAnswerOutOfRange(Condition cond);

 private:
  // Handles a heap-number (non-smi) input without the generic stub.
  void GenerateNonSmiInput();
  // Boxes a 32-bit answer that does not fit in a smi into a heap number.
  void GenerateAnswerOutOfRange();
  // Converts the integer in |answer| to a double stored in |heap_number|.
  void WriteNonSmiAnswer(Register answer,
                         Register heap_number,
                         Register scratch);

  Token::Value op_;               // The binary operation.
  int value_;                     // The constant, smi-representable operand.
  bool reversed_;                 // True when value_ is the left operand.
  OverwriteMode overwrite_mode_;  // Which operand may be overwritten.
  Register tos_register_;         // Holds the non-constant operand on entry.
  Label non_smi_input_;           // Target of JumpToNonSmiInput().
  Label answer_out_of_range_;     // Target of JumpToAnswerOutOfRange().
};
948
949
// For bit operations we try harder and handle the case where the input is not
// a Smi but a 32bits integer without calling the generic stub.
// Emits a conditional branch to that slow path; only valid for bit ops.
void DeferredInlineSmiOperation::JumpToNonSmiInput(Condition cond) {
  ASSERT(Token::IsBitOp(op_));

  __ b(cond, &non_smi_input_);
}
957
958
// For bit operations the result is always 32bits so we handle the case where
// the result does not fit in a Smi without calling the generic stub.
// Emits a conditional branch to that slow path; only valid for bit ops.
void DeferredInlineSmiOperation::JumpToAnswerOutOfRange(Condition cond) {
  ASSERT(Token::IsBitOp(op_));

  if ((op_ == Token::SHR) && !CpuFeatures::IsSupported(VFP3)) {
    // >>> requires an unsigned to double conversion and the non VFP code
    // does not support this conversion, so branch to the generic stub's
    // entry point instead of the out-of-range handler.
    __ b(cond, entry_label());
  } else {
    __ b(cond, &answer_out_of_range_);
  }
}
972
Steve Block8defd9f2010-07-08 12:39:36 +0100973
// On entry the non-constant side of the binary operation is in tos_register_
// and the constant smi side is nowhere.  The tos_register_ is not used by the
// virtual frame.  On exit the answer is in the tos_register_ and the virtual
// frame is unchanged.
void DeferredInlineSmiOperation::Generate() {
  // Work on a spilled copy of the frame so registers can be used freely,
  // then merge back to the original frame state before exiting.
  VirtualFrame copied_frame(*frame_state()->frame());
  copied_frame.SpillAll();

  // GenericBinaryOpStub expects lhs in r1 and rhs in r0 by default; the
  // cases below may swap this depending on reversed_ and tos_register_.
  Register lhs = r1;
  Register rhs = r0;
  switch (op_) {
    case Token::ADD: {
      // Revert optimistic add.
      if (reversed_) {
        __ sub(r0, tos_register_, Operand(Smi::FromInt(value_)));
        __ mov(r1, Operand(Smi::FromInt(value_)));
      } else {
        __ sub(r1, tos_register_, Operand(Smi::FromInt(value_)));
        __ mov(r0, Operand(Smi::FromInt(value_)));
      }
      break;
    }

    case Token::SUB: {
      // Revert optimistic sub.
      if (reversed_) {
        __ rsb(r0, tos_register_, Operand(Smi::FromInt(value_)));
        __ mov(r1, Operand(Smi::FromInt(value_)));
      } else {
        __ add(r1, tos_register_, Operand(Smi::FromInt(value_)));
        __ mov(r0, Operand(Smi::FromInt(value_)));
      }
      break;
    }

    // For these operations there is no optimistic operation that needs to be
    // reverted.
    case Token::MUL:
    case Token::MOD:
    case Token::BIT_OR:
    case Token::BIT_XOR:
    case Token::BIT_AND:
    case Token::SHL:
    case Token::SHR:
    case Token::SAR: {
      // Load the constant into whichever of r0/r1 is not the input.
      if (tos_register_.is(r1)) {
        __ mov(r0, Operand(Smi::FromInt(value_)));
      } else {
        ASSERT(tos_register_.is(r0));
        __ mov(r1, Operand(Smi::FromInt(value_)));
      }
      // Swap lhs/rhs when the operand order in registers is reversed.
      if (reversed_ == tos_register_.is(r1)) {
        lhs = r0;
        rhs = r1;
      }
      break;
    }

    default:
      // Other cases should have been handled before this point.
      UNREACHABLE();
      break;
  }

  GenericBinaryOpStub stub(op_, overwrite_mode_, lhs, rhs, value_);
  __ CallStub(&stub);

  // The generic stub returns its value in r0, but that's not
  // necessarily what we want.  We want whatever the inlined code
  // expected, which is that the answer is in the same register as
  // the operand was.
  __ Move(tos_register_, r0);

  // The tos register was not in use for the virtual frame that we
  // came into this function with, so we can merge back to that frame
  // without trashing it.
  copied_frame.MergeTo(frame_state()->frame());

  Exit();

  // Emit the extra bit-operation slow paths only if the inline code
  // actually branched to them.
  if (non_smi_input_.is_linked()) {
    GenerateNonSmiInput();
  }

  if (answer_out_of_range_.is_linked()) {
    GenerateAnswerOutOfRange();
  }
}
1062
1063
// Convert and write the integer answer into heap_number.
// |scratch| is clobbered.  For SHR the integer is treated as unsigned.
void DeferredInlineSmiOperation::WriteNonSmiAnswer(Register answer,
                                                   Register heap_number,
                                                   Register scratch) {
  if (CpuFeatures::IsSupported(VFP3)) {
    CpuFeatures::Scope scope(VFP3);
    __ vmov(s0, answer);
    if (op_ == Token::SHR) {
      // >>> produces an unsigned 32-bit value.
      __ vcvt_f64_u32(d0, s0);
    } else {
      __ vcvt_f64_s32(d0, s0);
    }
    // heap_number is tagged; strip the tag to form the payload address.
    __ sub(scratch, heap_number, Operand(kHeapObjectTag));
    __ vstr(d0, scratch, HeapNumber::kValueOffset);
  } else {
    // No VFP: use the integer-only conversion stub.
    WriteInt32ToHeapNumberStub stub(answer, heap_number, scratch);
    __ CallStub(&stub);
  }
}
1083
1084
void DeferredInlineSmiOperation::GenerateNonSmiInput() {
  // We know the left hand side is not a Smi and the right hand side is an
  // immediate value (value_) which can be represented as a Smi. We only
  // handle bit operations.
  ASSERT(Token::IsBitOp(op_));

  // Anything falling through from Generate() above is a bug; this code is
  // only reachable via the non_smi_input_ label.
  if (FLAG_debug_code) {
    __ Abort("Should not fall through!");
  }

  __ bind(&non_smi_input_);
  if (FLAG_debug_code) {
    __ AbortIfSmi(tos_register_);
  }

  // This routine uses the registers from r2 to r6.  At the moment they are
  // not used by the register allocator, but when they are it should use
  // SpillAll and MergeTo like DeferredInlineSmiOperation::Generate() above.

  Register heap_number_map = r7;
  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  __ ldr(r3, FieldMemOperand(tos_register_, HeapNumber::kMapOffset));
  __ cmp(r3, heap_number_map);
  // Not a number, fall back to the GenericBinaryOpStub.
  __ b(ne, entry_label());

  Register int32 = r2;
  // Not a 32bits signed int, fall back to the GenericBinaryOpStub.
  __ ConvertToInt32(tos_register_, int32, r4, r5, entry_label());

  // tos_register_ (r0 or r1): Original heap number.
  // int32: signed 32bits int.

  Label result_not_a_smi;
  int shift_value = value_ & 0x1f;
  switch (op_) {
    case Token::BIT_OR:  __ orr(int32, int32, Operand(value_)); break;
    case Token::BIT_XOR: __ eor(int32, int32, Operand(value_)); break;
    case Token::BIT_AND: __ and_(int32, int32, Operand(value_)); break;
    case Token::SAR:
      ASSERT(!reversed_);
      if (shift_value != 0) {
        __ mov(int32, Operand(int32, ASR, shift_value));
      }
      break;
    case Token::SHR:
      ASSERT(!reversed_);
      if (shift_value != 0) {
        __ mov(int32, Operand(int32, LSR, shift_value), SetCC);
      } else {
        // SHR is special because it is required to produce a positive answer.
        __ cmp(int32, Operand(0, RelocInfo::NONE));
      }
      if (CpuFeatures::IsSupported(VFP3)) {
        __ b(mi, &result_not_a_smi);
      } else {
        // Non VFP code cannot convert from unsigned to double, so fall back
        // to GenericBinaryOpStub.
        __ b(mi, entry_label());
      }
      break;
    case Token::SHL:
      ASSERT(!reversed_);
      if (shift_value != 0) {
        __ mov(int32, Operand(int32, LSL, shift_value));
      }
      break;
    default: UNREACHABLE();
  }
  // Check that the *signed* result fits in a smi. Not necessary for AND, SAR
  // if the shift is more than 0 or SHR if the shift is more than 1.
  // NOTE(review): Token::AND is the '&&' token and can never be op_ here
  // (only bit ops reach this code), so this first clause is always false and
  // the range check also runs for BIT_AND.  Presumably Token::BIT_AND was
  // intended; as written the check is redundant for non-negative masks but
  // still required for negative ones (e.g. x & -1), so it is safe -- confirm
  // before "fixing".
  if (!( (op_ == Token::AND) ||
        ((op_ == Token::SAR) && (shift_value > 0)) ||
        ((op_ == Token::SHR) && (shift_value > 1)))) {
    // Adding 0x40000000 sets the sign bit iff int32 is outside smi range.
    __ add(r3, int32, Operand(0x40000000), SetCC);
    __ b(mi, &result_not_a_smi);
  }
  __ mov(tos_register_, Operand(int32, LSL, kSmiTagSize));
  Exit();

  if (result_not_a_smi.is_linked()) {
    __ bind(&result_not_a_smi);
    if (overwrite_mode_ != OVERWRITE_LEFT) {
      ASSERT((overwrite_mode_ == NO_OVERWRITE) ||
             (overwrite_mode_ == OVERWRITE_RIGHT));
      // If the allocation fails, fall back to the GenericBinaryOpStub.
      __ AllocateHeapNumber(r4, r5, r6, heap_number_map, entry_label());
      // Nothing can go wrong now, so overwrite tos.
      __ mov(tos_register_, Operand(r4));
    }

    // int32: answer as signed 32bits integer.
    // tos_register_: Heap number to write the answer into.
    WriteNonSmiAnswer(int32, tos_register_, r3);

    Exit();
  }
}
1183
1184
void DeferredInlineSmiOperation::GenerateAnswerOutOfRange() {
  // The inputs to a bitwise operation were Smis but the result cannot fit
  // into a Smi, so we store it into a heap number. VirtualFrame::scratch0()
  // holds the untagged result to be converted.  tos_register_ contains the
  // input.  See the calls to JumpToAnswerOutOfRange to see how we got here.
  ASSERT(Token::IsBitOp(op_));
  ASSERT(!reversed_);

  Register untagged_result = VirtualFrame::scratch0();

  // This code is only reachable via the answer_out_of_range_ label.
  if (FLAG_debug_code) {
    __ Abort("Should not fall through!");
  }

  __ bind(&answer_out_of_range_);
  if (((value_ & 0x1f) == 0) && (op_ == Token::SHR)) {
    // >>> 0 is a special case where the untagged_result register is not set up
    // yet. We untag the input to get it.
    __ mov(untagged_result, Operand(tos_register_, ASR, kSmiTagSize));
  }

  // This routine uses the registers from r2 to r6.  At the moment they are
  // not used by the register allocator, but when they are it should use
  // SpillAll and MergeTo like DeferredInlineSmiOperation::Generate() above.

  // Allocate the result heap number.
  Register heap_number_map = VirtualFrame::scratch1();
  Register heap_number = r4;
  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  // If the allocation fails, fall back to the GenericBinaryOpStub.
  __ AllocateHeapNumber(heap_number, r5, r6, heap_number_map, entry_label());
  WriteNonSmiAnswer(untagged_result, heap_number, r3);
  __ mov(tos_register_, Operand(heap_number));

  Exit();
}
1221
1222
// Returns true if x has at most two bits set.
static bool PopCountLessThanEqual2(unsigned int x) {
  // Kernighan's trick: x &= x - 1 clears the lowest set bit (and is a
  // no-op on zero).  After clearing twice, only values with population
  // count <= 2 have become zero.
  x &= x - 1;
  x &= x - 1;
  return x == 0;
}
1227
1228
// Returns the index of the lowest bit set.
// Precondition: x != 0 (otherwise the loop never terminates).
static int BitPosition(unsigned x) {
  int index = 0;
  for (; (x & 1) == 0; x >>= 1) {
    index++;
  }
  return index;
}
1242
1243
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001244// Can we multiply by x with max two shifts and an add.
1245// This answers yes to all integers from 2 to 10.
1246static bool IsEasyToMultiplyBy(int x) {
1247 if (x < 2) return false; // Avoid special cases.
1248 if (x > (Smi::kMaxValue + 1) >> 2) return false; // Almost always overflows.
1249 if (IsPowerOf2(x)) return true; // Simple shift.
1250 if (PopCountLessThanEqual2(x)) return true; // Shift and add and shift.
1251 if (IsPowerOf2(x + 1)) return true; // Patterns like 11111.
1252 return false;
1253}
1254
1255
1256// Can multiply by anything that IsEasyToMultiplyBy returns true for.
1257// Source and destination may be the same register. This routine does
1258// not set carry and overflow the way a mul instruction would.
1259static void InlineMultiplyByKnownInt(MacroAssembler* masm,
1260 Register source,
1261 Register destination,
1262 int known_int) {
1263 if (IsPowerOf2(known_int)) {
1264 masm->mov(destination, Operand(source, LSL, BitPosition(known_int)));
1265 } else if (PopCountLessThanEqual2(known_int)) {
1266 int first_bit = BitPosition(known_int);
1267 int second_bit = BitPosition(known_int ^ (1 << first_bit));
1268 masm->add(destination, source,
1269 Operand(source, LSL, second_bit - first_bit));
1270 if (first_bit != 0) {
1271 masm->mov(destination, Operand(destination, LSL, first_bit));
1272 }
1273 } else {
1274 ASSERT(IsPowerOf2(known_int + 1)); // Patterns like 1111.
1275 int the_bit = BitPosition(known_int + 1);
1276 masm->rsb(destination, source, Operand(source, LSL, the_bit));
1277 }
1278}
1279
1280
Steve Blocka7e24c12009-10-30 11:49:00 +00001281void CodeGenerator::SmiOperation(Token::Value op,
1282 Handle<Object> value,
1283 bool reversed,
1284 OverwriteMode mode) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001285 int int_value = Smi::cast(*value)->value();
1286
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001287 bool both_sides_are_smi = frame_->KnownSmiAt(0);
1288
Steve Block6ded16b2010-05-10 14:33:55 +01001289 bool something_to_inline;
1290 switch (op) {
1291 case Token::ADD:
1292 case Token::SUB:
1293 case Token::BIT_AND:
1294 case Token::BIT_OR:
1295 case Token::BIT_XOR: {
1296 something_to_inline = true;
1297 break;
1298 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001299 case Token::SHL: {
1300 something_to_inline = (both_sides_are_smi || !reversed);
1301 break;
1302 }
Steve Block6ded16b2010-05-10 14:33:55 +01001303 case Token::SHR:
1304 case Token::SAR: {
1305 if (reversed) {
1306 something_to_inline = false;
1307 } else {
1308 something_to_inline = true;
1309 }
1310 break;
1311 }
1312 case Token::MOD: {
1313 if (reversed || int_value < 2 || !IsPowerOf2(int_value)) {
1314 something_to_inline = false;
1315 } else {
1316 something_to_inline = true;
1317 }
1318 break;
1319 }
1320 case Token::MUL: {
1321 if (!IsEasyToMultiplyBy(int_value)) {
1322 something_to_inline = false;
1323 } else {
1324 something_to_inline = true;
1325 }
1326 break;
1327 }
1328 default: {
1329 something_to_inline = false;
1330 break;
1331 }
1332 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001333
Steve Block6ded16b2010-05-10 14:33:55 +01001334 if (!something_to_inline) {
1335 if (!reversed) {
1336 // Push the rhs onto the virtual frame by putting it in a TOS register.
1337 Register rhs = frame_->GetTOSRegister();
1338 __ mov(rhs, Operand(value));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001339 frame_->EmitPush(rhs, TypeInfo::Smi());
1340 GenericBinaryOperation(op, mode, GENERATE_INLINE_SMI, int_value);
Steve Block6ded16b2010-05-10 14:33:55 +01001341 } else {
1342 // Pop the rhs, then push lhs and rhs in the right order. Only performs
1343 // at most one pop, the rest takes place in TOS registers.
1344 Register lhs = frame_->GetTOSRegister(); // Get reg for pushing.
1345 Register rhs = frame_->PopToRegister(lhs); // Don't use lhs for this.
1346 __ mov(lhs, Operand(value));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001347 frame_->EmitPush(lhs, TypeInfo::Smi());
1348 TypeInfo t = both_sides_are_smi ? TypeInfo::Smi() : TypeInfo::Unknown();
1349 frame_->EmitPush(rhs, t);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001350 GenericBinaryOperation(op, mode, GENERATE_INLINE_SMI,
1351 GenericBinaryOpStub::kUnknownIntValue);
Steve Block6ded16b2010-05-10 14:33:55 +01001352 }
1353 return;
1354 }
1355
1356 // We move the top of stack to a register (normally no move is invoved).
1357 Register tos = frame_->PopToRegister();
Steve Blocka7e24c12009-10-30 11:49:00 +00001358 switch (op) {
1359 case Token::ADD: {
1360 DeferredCode* deferred =
Steve Block6ded16b2010-05-10 14:33:55 +01001361 new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00001362
Steve Block6ded16b2010-05-10 14:33:55 +01001363 __ add(tos, tos, Operand(value), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00001364 deferred->Branch(vs);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001365 if (!both_sides_are_smi) {
1366 __ tst(tos, Operand(kSmiTagMask));
1367 deferred->Branch(ne);
1368 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001369 deferred->BindExit();
Steve Block6ded16b2010-05-10 14:33:55 +01001370 frame_->EmitPush(tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00001371 break;
1372 }
1373
1374 case Token::SUB: {
1375 DeferredCode* deferred =
Steve Block6ded16b2010-05-10 14:33:55 +01001376 new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00001377
1378 if (reversed) {
Steve Block6ded16b2010-05-10 14:33:55 +01001379 __ rsb(tos, tos, Operand(value), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00001380 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01001381 __ sub(tos, tos, Operand(value), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00001382 }
1383 deferred->Branch(vs);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001384 if (!both_sides_are_smi) {
1385 __ tst(tos, Operand(kSmiTagMask));
1386 deferred->Branch(ne);
1387 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001388 deferred->BindExit();
Steve Block6ded16b2010-05-10 14:33:55 +01001389 frame_->EmitPush(tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00001390 break;
1391 }
1392
1393
1394 case Token::BIT_OR:
1395 case Token::BIT_XOR:
1396 case Token::BIT_AND: {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001397 if (both_sides_are_smi) {
1398 switch (op) {
1399 case Token::BIT_OR: __ orr(tos, tos, Operand(value)); break;
1400 case Token::BIT_XOR: __ eor(tos, tos, Operand(value)); break;
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001401 case Token::BIT_AND: __ And(tos, tos, Operand(value)); break;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001402 default: UNREACHABLE();
1403 }
1404 frame_->EmitPush(tos, TypeInfo::Smi());
1405 } else {
Iain Merrick9ac36c92010-09-13 15:29:50 +01001406 DeferredInlineSmiOperation* deferred =
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001407 new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
1408 __ tst(tos, Operand(kSmiTagMask));
Iain Merrick9ac36c92010-09-13 15:29:50 +01001409 deferred->JumpToNonSmiInput(ne);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001410 switch (op) {
1411 case Token::BIT_OR: __ orr(tos, tos, Operand(value)); break;
1412 case Token::BIT_XOR: __ eor(tos, tos, Operand(value)); break;
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001413 case Token::BIT_AND: __ And(tos, tos, Operand(value)); break;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001414 default: UNREACHABLE();
1415 }
1416 deferred->BindExit();
Ben Murdochb0fe1622011-05-05 13:52:32 +01001417 TypeInfo result_type =
1418 (op == Token::BIT_AND) ? TypeInfo::Smi() : TypeInfo::Integer32();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001419 frame_->EmitPush(tos, result_type);
Steve Blocka7e24c12009-10-30 11:49:00 +00001420 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001421 break;
1422 }
1423
1424 case Token::SHL:
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001425 if (reversed) {
1426 ASSERT(both_sides_are_smi);
1427 int max_shift = 0;
1428 int max_result = int_value == 0 ? 1 : int_value;
1429 while (Smi::IsValid(max_result << 1)) {
1430 max_shift++;
1431 max_result <<= 1;
1432 }
1433 DeferredCode* deferred =
1434 new DeferredInlineSmiOperation(op, int_value, true, mode, tos);
1435 // Mask off the last 5 bits of the shift operand (rhs). This is part
1436 // of the definition of shift in JS and we know we have a Smi so we
1437 // can safely do this. The masked version gets passed to the
1438 // deferred code, but that makes no difference.
1439 __ and_(tos, tos, Operand(Smi::FromInt(0x1f)));
1440 __ cmp(tos, Operand(Smi::FromInt(max_shift)));
1441 deferred->Branch(ge);
1442 Register scratch = VirtualFrame::scratch0();
1443 __ mov(scratch, Operand(tos, ASR, kSmiTagSize)); // Untag.
1444 __ mov(tos, Operand(Smi::FromInt(int_value))); // Load constant.
1445 __ mov(tos, Operand(tos, LSL, scratch)); // Shift constant.
1446 deferred->BindExit();
1447 TypeInfo result = TypeInfo::Integer32();
1448 frame_->EmitPush(tos, result);
1449 break;
1450 }
1451 // Fall through!
Steve Blocka7e24c12009-10-30 11:49:00 +00001452 case Token::SHR:
1453 case Token::SAR: {
Steve Block6ded16b2010-05-10 14:33:55 +01001454 ASSERT(!reversed);
Iain Merrick9ac36c92010-09-13 15:29:50 +01001455 int shift_value = int_value & 0x1f;
Steve Block791712a2010-08-27 10:21:07 +01001456 TypeInfo result = TypeInfo::Number();
1457
1458 if (op == Token::SHR) {
Iain Merrick9ac36c92010-09-13 15:29:50 +01001459 if (shift_value > 1) {
Steve Block791712a2010-08-27 10:21:07 +01001460 result = TypeInfo::Smi();
Iain Merrick9ac36c92010-09-13 15:29:50 +01001461 } else if (shift_value > 0) {
Steve Block791712a2010-08-27 10:21:07 +01001462 result = TypeInfo::Integer32();
Iain Merrick75681382010-08-19 15:07:18 +01001463 }
Steve Block791712a2010-08-27 10:21:07 +01001464 } else if (op == Token::SAR) {
Iain Merrick9ac36c92010-09-13 15:29:50 +01001465 if (shift_value > 0) {
Steve Block791712a2010-08-27 10:21:07 +01001466 result = TypeInfo::Smi();
1467 } else {
1468 result = TypeInfo::Integer32();
1469 }
1470 } else {
1471 ASSERT(op == Token::SHL);
1472 result = TypeInfo::Integer32();
Iain Merrick75681382010-08-19 15:07:18 +01001473 }
Steve Block791712a2010-08-27 10:21:07 +01001474
Iain Merrick9ac36c92010-09-13 15:29:50 +01001475 DeferredInlineSmiOperation* deferred =
Steve Block6ded16b2010-05-10 14:33:55 +01001476 new DeferredInlineSmiOperation(op, shift_value, false, mode, tos);
Iain Merrick9ac36c92010-09-13 15:29:50 +01001477 if (!both_sides_are_smi) {
1478 __ tst(tos, Operand(kSmiTagMask));
1479 deferred->JumpToNonSmiInput(ne);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001480 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001481 switch (op) {
1482 case Token::SHL: {
1483 if (shift_value != 0) {
John Reck59135872010-11-02 12:39:01 -07001484 Register untagged_result = VirtualFrame::scratch0();
1485 Register scratch = VirtualFrame::scratch1();
Kristian Monsen25f61362010-05-21 11:50:48 +01001486 int adjusted_shift = shift_value - kSmiTagSize;
1487 ASSERT(adjusted_shift >= 0);
Iain Merrick9ac36c92010-09-13 15:29:50 +01001488
Kristian Monsen25f61362010-05-21 11:50:48 +01001489 if (adjusted_shift != 0) {
John Reck59135872010-11-02 12:39:01 -07001490 __ mov(untagged_result, Operand(tos, LSL, adjusted_shift));
1491 } else {
1492 __ mov(untagged_result, Operand(tos));
Kristian Monsen25f61362010-05-21 11:50:48 +01001493 }
Iain Merrick9ac36c92010-09-13 15:29:50 +01001494 // Check that the *signed* result fits in a smi.
John Reck59135872010-11-02 12:39:01 -07001495 __ add(scratch, untagged_result, Operand(0x40000000), SetCC);
Iain Merrick9ac36c92010-09-13 15:29:50 +01001496 deferred->JumpToAnswerOutOfRange(mi);
John Reck59135872010-11-02 12:39:01 -07001497 __ mov(tos, Operand(untagged_result, LSL, kSmiTagSize));
Steve Blocka7e24c12009-10-30 11:49:00 +00001498 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001499 break;
1500 }
1501 case Token::SHR: {
Steve Blocka7e24c12009-10-30 11:49:00 +00001502 if (shift_value != 0) {
John Reck59135872010-11-02 12:39:01 -07001503 Register untagged_result = VirtualFrame::scratch0();
1504 // Remove tag.
1505 __ mov(untagged_result, Operand(tos, ASR, kSmiTagSize));
1506 __ mov(untagged_result, Operand(untagged_result, LSR, shift_value));
Kristian Monsen25f61362010-05-21 11:50:48 +01001507 if (shift_value == 1) {
Iain Merrick9ac36c92010-09-13 15:29:50 +01001508 // Check that the *unsigned* result fits in a smi.
1509 // Neither of the two high-order bits can be set:
Kristian Monsen25f61362010-05-21 11:50:48 +01001510 // - 0x80000000: high bit would be lost when smi tagging
Iain Merrick9ac36c92010-09-13 15:29:50 +01001511 // - 0x40000000: this number would convert to negative when Smi
1512 // tagging.
1513 // These two cases can only happen with shifts by 0 or 1 when
1514 // handed a valid smi.
John Reck59135872010-11-02 12:39:01 -07001515 __ tst(untagged_result, Operand(0xc0000000));
Iain Merrick9ac36c92010-09-13 15:29:50 +01001516 deferred->JumpToAnswerOutOfRange(ne);
Kristian Monsen25f61362010-05-21 11:50:48 +01001517 }
John Reck59135872010-11-02 12:39:01 -07001518 __ mov(tos, Operand(untagged_result, LSL, kSmiTagSize));
Iain Merrick9ac36c92010-09-13 15:29:50 +01001519 } else {
1520 __ cmp(tos, Operand(0, RelocInfo::NONE));
1521 deferred->JumpToAnswerOutOfRange(mi);
Steve Blocka7e24c12009-10-30 11:49:00 +00001522 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001523 break;
1524 }
1525 case Token::SAR: {
1526 if (shift_value != 0) {
Iain Merrick9ac36c92010-09-13 15:29:50 +01001527 // Do the shift and the tag removal in one operation. If the shift
Kristian Monsen25f61362010-05-21 11:50:48 +01001528 // is 31 bits (the highest possible value) then we emit the
Iain Merrick9ac36c92010-09-13 15:29:50 +01001529 // instruction as a shift by 0 which in the ARM ISA means shift
1530 // arithmetically by 32.
Kristian Monsen25f61362010-05-21 11:50:48 +01001531 __ mov(tos, Operand(tos, ASR, (kSmiTagSize + shift_value) & 0x1f));
Kristian Monsen25f61362010-05-21 11:50:48 +01001532 __ mov(tos, Operand(tos, LSL, kSmiTagSize));
Steve Blocka7e24c12009-10-30 11:49:00 +00001533 }
1534 break;
1535 }
1536 default: UNREACHABLE();
1537 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001538 deferred->BindExit();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001539 frame_->EmitPush(tos, result);
Steve Blocka7e24c12009-10-30 11:49:00 +00001540 break;
1541 }
1542
1543 case Token::MOD: {
Steve Block6ded16b2010-05-10 14:33:55 +01001544 ASSERT(!reversed);
1545 ASSERT(int_value >= 2);
1546 ASSERT(IsPowerOf2(int_value));
Steve Blocka7e24c12009-10-30 11:49:00 +00001547 DeferredCode* deferred =
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001548 new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00001549 unsigned mask = (0x80000000u | kSmiTagMask);
Steve Block6ded16b2010-05-10 14:33:55 +01001550 __ tst(tos, Operand(mask));
Steve Blocka7e24c12009-10-30 11:49:00 +00001551 deferred->Branch(ne); // Go to deferred code on non-Smis and negative.
1552 mask = (int_value << kSmiTagSize) - 1;
Steve Block6ded16b2010-05-10 14:33:55 +01001553 __ and_(tos, tos, Operand(mask));
Steve Blocka7e24c12009-10-30 11:49:00 +00001554 deferred->BindExit();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001555 // Mod of positive power of 2 Smi gives a Smi if the lhs is an integer.
1556 frame_->EmitPush(
1557 tos,
1558 both_sides_are_smi ? TypeInfo::Smi() : TypeInfo::Number());
Steve Blocka7e24c12009-10-30 11:49:00 +00001559 break;
1560 }
1561
1562 case Token::MUL: {
Steve Block6ded16b2010-05-10 14:33:55 +01001563 ASSERT(IsEasyToMultiplyBy(int_value));
Steve Blocka7e24c12009-10-30 11:49:00 +00001564 DeferredCode* deferred =
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001565 new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00001566 unsigned max_smi_that_wont_overflow = Smi::kMaxValue / int_value;
1567 max_smi_that_wont_overflow <<= kSmiTagSize;
1568 unsigned mask = 0x80000000u;
1569 while ((mask & max_smi_that_wont_overflow) == 0) {
1570 mask |= mask >> 1;
1571 }
1572 mask |= kSmiTagMask;
1573 // This does a single mask that checks for a too high value in a
1574 // conservative way and for a non-Smi. It also filters out negative
1575 // numbers, unfortunately, but since this code is inline we prefer
1576 // brevity to comprehensiveness.
Steve Block6ded16b2010-05-10 14:33:55 +01001577 __ tst(tos, Operand(mask));
Steve Blocka7e24c12009-10-30 11:49:00 +00001578 deferred->Branch(ne);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001579 InlineMultiplyByKnownInt(masm_, tos, tos, int_value);
Steve Blocka7e24c12009-10-30 11:49:00 +00001580 deferred->BindExit();
Steve Block6ded16b2010-05-10 14:33:55 +01001581 frame_->EmitPush(tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00001582 break;
1583 }
1584
1585 default:
Steve Block6ded16b2010-05-10 14:33:55 +01001586 UNREACHABLE();
Steve Blocka7e24c12009-10-30 11:49:00 +00001587 break;
1588 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001589}
1590
1591
1592void CodeGenerator::Comparison(Condition cc,
1593 Expression* left,
1594 Expression* right,
1595 bool strict) {
Steve Block6ded16b2010-05-10 14:33:55 +01001596 VirtualFrame::RegisterAllocationScope scope(this);
Steve Blocka7e24c12009-10-30 11:49:00 +00001597
Steve Block6ded16b2010-05-10 14:33:55 +01001598 if (left != NULL) Load(left);
1599 if (right != NULL) Load(right);
1600
Steve Blocka7e24c12009-10-30 11:49:00 +00001601 // sp[0] : y
1602 // sp[1] : x
1603 // result : cc register
1604
1605 // Strict only makes sense for equality comparisons.
1606 ASSERT(!strict || cc == eq);
1607
Steve Block6ded16b2010-05-10 14:33:55 +01001608 Register lhs;
1609 Register rhs;
1610
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001611 bool lhs_is_smi;
1612 bool rhs_is_smi;
1613
Steve Block6ded16b2010-05-10 14:33:55 +01001614 // We load the top two stack positions into registers chosen by the virtual
1615 // frame. This should keep the register shuffling to a minimum.
Steve Blocka7e24c12009-10-30 11:49:00 +00001616 // Implement '>' and '<=' by reversal to obtain ECMA-262 conversion order.
1617 if (cc == gt || cc == le) {
1618 cc = ReverseCondition(cc);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001619 lhs_is_smi = frame_->KnownSmiAt(0);
1620 rhs_is_smi = frame_->KnownSmiAt(1);
Steve Block6ded16b2010-05-10 14:33:55 +01001621 lhs = frame_->PopToRegister();
1622 rhs = frame_->PopToRegister(lhs); // Don't pop to the same register again!
Steve Blocka7e24c12009-10-30 11:49:00 +00001623 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001624 rhs_is_smi = frame_->KnownSmiAt(0);
1625 lhs_is_smi = frame_->KnownSmiAt(1);
Steve Block6ded16b2010-05-10 14:33:55 +01001626 rhs = frame_->PopToRegister();
1627 lhs = frame_->PopToRegister(rhs); // Don't pop to the same register again!
Steve Blocka7e24c12009-10-30 11:49:00 +00001628 }
Steve Block6ded16b2010-05-10 14:33:55 +01001629
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001630 bool both_sides_are_smi = (lhs_is_smi && rhs_is_smi);
1631
Steve Block6ded16b2010-05-10 14:33:55 +01001632 ASSERT(rhs.is(r0) || rhs.is(r1));
1633 ASSERT(lhs.is(r0) || lhs.is(r1));
1634
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001635 JumpTarget exit;
Steve Blocka7e24c12009-10-30 11:49:00 +00001636
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001637 if (!both_sides_are_smi) {
1638 // Now we have the two sides in r0 and r1. We flush any other registers
1639 // because the stub doesn't know about register allocation.
1640 frame_->SpillAll();
1641 Register scratch = VirtualFrame::scratch0();
1642 Register smi_test_reg;
1643 if (lhs_is_smi) {
1644 smi_test_reg = rhs;
1645 } else if (rhs_is_smi) {
1646 smi_test_reg = lhs;
1647 } else {
1648 __ orr(scratch, lhs, Operand(rhs));
1649 smi_test_reg = scratch;
1650 }
1651 __ tst(smi_test_reg, Operand(kSmiTagMask));
1652 JumpTarget smi;
1653 smi.Branch(eq);
1654
1655 // Perform non-smi comparison by stub.
1656 // CompareStub takes arguments in r0 and r1, returns <0, >0 or 0 in r0.
1657 // We call with 0 args because there are 0 on the stack.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001658 CompareStub stub(cc, strict, NO_SMI_COMPARE_IN_STUB, lhs, rhs);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001659 frame_->CallStub(&stub, 0);
Iain Merrick9ac36c92010-09-13 15:29:50 +01001660 __ cmp(r0, Operand(0, RelocInfo::NONE));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001661 exit.Jump();
1662
1663 smi.Bind();
Steve Block6ded16b2010-05-10 14:33:55 +01001664 }
1665
Steve Blocka7e24c12009-10-30 11:49:00 +00001666 // Do smi comparisons by pointer comparison.
Steve Block6ded16b2010-05-10 14:33:55 +01001667 __ cmp(lhs, Operand(rhs));
Steve Blocka7e24c12009-10-30 11:49:00 +00001668
1669 exit.Bind();
1670 cc_reg_ = cc;
1671}
1672
1673
// Call the function on the stack with the given arguments.
// On return the call result is left in r0 and the function has been
// dropped from the frame.
void CodeGenerator::CallWithArguments(ZoneList<Expression*>* args,
                                      CallFunctionFlags flags,
                                      int position) {
  // Push the arguments ("left-to-right") on the stack.
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    Load(args->at(i));
  }

  // Record the position for debugging purposes.
  CodeForSourcePosition(position);

  // Use the shared code stub to call the function.
  InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
  CallFunctionStub call_function(arg_count, in_loop, flags);
  // arg_count + 1: the arguments plus the function itself.
  frame_->CallStub(&call_function, arg_count + 1);

  // Restore context and pop function from the stack.
  __ ldr(cp, frame_->Context());
  frame_->Drop();  // discard the TOS
}
1696
1697
void CodeGenerator::CallApplyLazy(Expression* applicand,
                                  Expression* receiver,
                                  VariableProxy* arguments,
                                  int position) {
  // An optimized implementation of expressions of the form
  // x.apply(y, arguments).
  // If the arguments object of the scope has not been allocated,
  // and x.apply is Function.prototype.apply, this optimization
  // just copies y and the arguments of the current function on the
  // stack, as receiver and arguments, and calls x.
  // In the implementation comments, we call x the applicand
  // and y the receiver.

  ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
  ASSERT(arguments->IsArguments());

  // Load applicand.apply onto the stack. This will usually
  // give us a megamorphic load site. Not super, but it works.
  Load(applicand);
  Handle<String> name = Factory::LookupAsciiSymbol("apply");
  frame_->Dup();
  frame_->CallLoadIC(name, RelocInfo::CODE_TARGET);
  frame_->EmitPush(r0);

  // Load the receiver and the existing arguments object onto the
  // expression stack. Avoid allocating the arguments object here.
  Load(receiver);
  LoadFromSlot(scope()->arguments()->AsSlot(), NOT_INSIDE_TYPEOF);

  // At this point the top two stack elements are probably in registers
  // since they were just loaded.  Ensure they are in regs and get the
  // regs.
  Register receiver_reg = frame_->Peek2();
  Register arguments_reg = frame_->Peek();

  // From now on the frame is spilled.
  frame_->SpillAll();

  // Emit the source position information after having loaded the
  // receiver and the arguments.
  CodeForSourcePosition(position);
  // Contents of the stack at this point:
  //   sp[0]: arguments object of the current function or the hole.
  //   sp[1]: receiver
  //   sp[2]: applicand.apply
  //   sp[3]: applicand.

  // Check if the arguments object has been lazily allocated
  // already. If so, just use that instead of copying the arguments
  // from the stack. This also deals with cases where a local variable
  // named 'arguments' has been introduced.
  JumpTarget slow;
  Label done;
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(ip, arguments_reg);
  slow.Branch(ne);

  Label build_args;
  // Get rid of the arguments object probe.
  frame_->Drop();
  // Stack now has 3 elements on it.
  // Contents of stack at this point:
  //   sp[0]: receiver - in the receiver_reg register.
  //   sp[1]: applicand.apply
  //   sp[2]: applicand.

  // Check that the receiver really is a JavaScript object.
  __ BranchOnSmi(receiver_reg, &build_args);
  // We allow all JSObjects including JSFunctions.  As long as
  // JS_FUNCTION_TYPE is the last instance type and it is right
  // after LAST_JS_OBJECT_TYPE, we do not have to check the upper
  // bound.
  STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
  STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
  __ CompareObjectType(receiver_reg, r2, r3, FIRST_JS_OBJECT_TYPE);
  __ b(lt, &build_args);

  // Check that applicand.apply is Function.prototype.apply.
  __ ldr(r0, MemOperand(sp, kPointerSize));
  __ BranchOnSmi(r0, &build_args);
  __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
  __ b(ne, &build_args);
  // Compare the loaded function's code entry against the FunctionApply
  // builtin's code object.
  Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
  __ ldr(r1, FieldMemOperand(r0, JSFunction::kCodeEntryOffset));
  __ sub(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ cmp(r1, Operand(apply_code));
  __ b(ne, &build_args);

  // Check that applicand is a function.
  __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
  __ BranchOnSmi(r1, &build_args);
  __ CompareObjectType(r1, r2, r3, JS_FUNCTION_TYPE);
  __ b(ne, &build_args);

  // Copy the arguments to this function possibly from the
  // adaptor frame below it.
  Label invoke, adapted;
  __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
  __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ b(eq, &adapted);

  // No arguments adaptor frame. Copy fixed number of arguments.
  __ mov(r0, Operand(scope()->num_parameters()));
  for (int i = 0; i < scope()->num_parameters(); i++) {
    __ ldr(r2, frame_->ParameterAt(i));
    __ push(r2);
  }
  __ jmp(&invoke);

  // Arguments adaptor frame present. Copy arguments from there, but
  // avoid copying too many arguments to avoid stack overflows.
  __ bind(&adapted);
  static const uint32_t kArgumentsLimit = 1 * KB;
  __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ mov(r0, Operand(r0, LSR, kSmiTagSize));  // Untag the argument count.
  __ mov(r3, r0);
  __ cmp(r0, Operand(kArgumentsLimit));
  __ b(gt, &build_args);

  // Loop through the arguments pushing them onto the execution
  // stack. We don't inform the virtual frame of the push, so we don't
  // have to worry about getting rid of the elements from the virtual
  // frame.
  Label loop;
  // r3 is a small non-negative integer, due to the test above.
  __ cmp(r3, Operand(0, RelocInfo::NONE));
  __ b(eq, &invoke);
  // Compute the address of the first argument.
  __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2));
  __ add(r2, r2, Operand(kPointerSize));
  __ bind(&loop);
  // Post-decrement argument address by kPointerSize on each iteration.
  __ ldr(r4, MemOperand(r2, kPointerSize, NegPostIndex));
  __ push(r4);
  __ sub(r3, r3, Operand(1), SetCC);
  __ b(gt, &loop);

  // Invoke the function.
  __ bind(&invoke);
  ParameterCount actual(r0);
  __ InvokeFunction(r1, actual, CALL_FUNCTION);
  // Drop applicand.apply and applicand from the stack, and push
  // the result of the function call, but leave the spilled frame
  // unchanged, with 3 elements, so it is correct when we compile the
  // slow-case code.
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ push(r0);
  // Stack now has 1 element:
  //   sp[0]: result
  __ jmp(&done);

  // Slow-case: Allocate the arguments object since we know it isn't
  // there, and fall-through to the slow-case where we call
  // applicand.apply.
  __ bind(&build_args);
  // Stack now has 3 elements, because we have jumped from where:
  //   sp[0]: receiver
  //   sp[1]: applicand.apply
  //   sp[2]: applicand.
  StoreArgumentsObject(false);

  // Stack and frame now have 4 elements.
  slow.Bind();

  // Generic computation of x.apply(y, args) with no special optimization.
  // Flip applicand.apply and applicand on the stack, so
  // applicand looks like the receiver of the applicand.apply call.
  // Then process it as a normal function call.
  __ ldr(r0, MemOperand(sp, 3 * kPointerSize));
  __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
  __ Strd(r0, r1, MemOperand(sp, 2 * kPointerSize));

  CallFunctionStub call_function(2, NOT_IN_LOOP, NO_CALL_FUNCTION_FLAGS);
  frame_->CallStub(&call_function, 3);
  // The function and its two arguments have been dropped.
  frame_->Drop();  // Drop the receiver as well.
  frame_->EmitPush(r0);
  frame_->SpillAll();  // A spilled frame is also jumping to label done.
  // Stack now has 1 element:
  //   sp[0]: result
  __ bind(&done);

  // Restore the context register after a call.
  __ ldr(cp, frame_->Context());
}
1884
1885
Steve Blocka7e24c12009-10-30 11:49:00 +00001886void CodeGenerator::Branch(bool if_true, JumpTarget* target) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001887 ASSERT(has_cc());
1888 Condition cc = if_true ? cc_reg_ : NegateCondition(cc_reg_);
1889 target->Branch(cc);
1890 cc_reg_ = al;
1891}
1892
1893
// Emit an inline stack-limit check: compare sp against the stack limit
// root and call the StackCheckStub when sp is lower.  The mov/Call pair
// is conditionally executed (condition lo), so no branch is needed.
void CodeGenerator::CheckStack() {
  frame_->SpillAll();
  Comment cmnt(masm_, "[ check stack");
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  masm_->cmp(sp, Operand(ip));
  StackCheckStub stub;
  // Call the stub if lower.
  // NOTE(review): masm_-> is used directly instead of the __ macro here,
  // presumably to avoid code-coverage instrumentation (cf. the note in
  // GenerateReturnSequence) — confirm before changing.
  masm_->mov(ip,
             Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
                     RelocInfo::CODE_TARGET),
             LeaveCC,
             lo);
  masm_->Call(ip, lo);
}
1908
1909
1910void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
1911#ifdef DEBUG
1912 int original_height = frame_->height();
1913#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00001914 for (int i = 0; frame_ != NULL && i < statements->length(); i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001915 Visit(statements->at(i));
Steve Blocka7e24c12009-10-30 11:49:00 +00001916 }
1917 ASSERT(!has_valid_frame() || frame_->height() == original_height);
1918}
1919
1920
// Generate code for a block statement: set the expected frame height on
// the block's break target, emit the contained statements, and bind the
// break target if any 'break' jumped to it.
void CodeGenerator::VisitBlock(Block* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Block");
  CodeForStatementPosition(node);
  node->break_target()->SetExpectedHeight();
  VisitStatements(node->statements());
  // Only bind the break target when some break actually linked to it.
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  node->break_target()->Unuse();
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}
1935
1936
// Declare the globals described by |pairs| via the runtime.  Pushes the
// context, the pairs array and an is_eval smi flag, then calls
// Runtime::kDeclareGlobals with those 3 arguments.
void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  frame_->EmitPush(cp);
  frame_->EmitPush(Operand(pairs));
  frame_->EmitPush(Operand(Smi::FromInt(is_eval() ? 1 : 0)));

  frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
  // The result is discarded.
}
1945
1946
// Generate code for a variable/function declaration.  LOOKUP slots are
// declared through the runtime; all other slots are initialized directly
// when a function or constant initial value is present.
void CodeGenerator::VisitDeclaration(Declaration* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Declaration");
  Variable* var = node->proxy()->var();
  ASSERT(var != NULL);  // must have been resolved
  Slot* slot = var->AsSlot();

  // If it was not possible to allocate the variable at compile time,
  // we need to "declare" it at runtime to make sure it actually
  // exists in the local context.
  if (slot != NULL && slot->type() == Slot::LOOKUP) {
    // Variables with a "LOOKUP" slot were introduced as non-locals
    // during variable resolution and must have mode DYNAMIC.
    ASSERT(var->is_dynamic());
    // For now, just do a runtime call.
    frame_->EmitPush(cp);
    frame_->EmitPush(Operand(var->name()));
    // Declaration nodes are always declared in only two modes.
    ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST);
    PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY;
    frame_->EmitPush(Operand(Smi::FromInt(attr)));
    // Push initial value, if any.
    // Note: For variables we must not push an initial value (such as
    // 'undefined') because we may have a (legal) redeclaration and we
    // must not destroy the current value.
    if (node->mode() == Variable::CONST) {
      frame_->EmitPushRoot(Heap::kTheHoleValueRootIndex);
    } else if (node->fun() != NULL) {
      Load(node->fun());
    } else {
      // No initial value: push a zero placeholder.
      frame_->EmitPush(Operand(0, RelocInfo::NONE));
    }

    // 4 arguments: context, name, attributes, initial value.
    frame_->CallRuntime(Runtime::kDeclareContextSlot, 4);
    // Ignore the return value (declarations are statements).

    ASSERT(frame_->height() == original_height);
    return;
  }

  ASSERT(!var->is_global());

  // If we have a function or a constant, we need to initialize the variable.
  Expression* val = NULL;
  if (node->mode() == Variable::CONST) {
    val = new Literal(Factory::the_hole_value());
  } else {
    val = node->fun();  // NULL if we don't have a function
  }

  if (val != NULL) {
    // Choose the write-barrier hint from the value's static type; literals
    // never require a new-space barrier.
    WriteBarrierCharacter wb_info =
        val->type()->IsLikelySmi() ? LIKELY_SMI : UNLIKELY_SMI;
    if (val->AsLiteral() != NULL) wb_info = NEVER_NEWSPACE;
    // Set initial value.
    Reference target(this, node->proxy());
    Load(val);
    target.SetValue(NOT_CONST_INIT, wb_info);

    // Get rid of the assigned value (declarations are statements).
    frame_->Drop();
  }
  ASSERT(frame_->height() == original_height);
}
2014
2015
// Generate code for an expression statement: evaluate the expression and
// discard its value, leaving the frame height unchanged.
void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ ExpressionStatement");
  CodeForStatementPosition(node);
  Expression* expression = node->expression();
  expression->MarkAsStatement();
  Load(expression);
  // The statement's value is unused; drop it.
  frame_->Drop();
  ASSERT(frame_->height() == original_height);
}
2028
2029
// An empty statement generates no code beyond the source-position record.
void CodeGenerator::VisitEmptyStatement(EmptyStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "// EmptyStatement");
  CodeForStatementPosition(node);
  // nothing to do
  ASSERT(frame_->height() == original_height);
}
2039
2040
// Generate code for an if statement.  Four cases are specialized on which
// of the then/else parts are present.  Note that LoadCondition and Visit
// can leave no valid frame (frame_ == NULL) when control cannot fall
// through, hence the frame_ != NULL guards throughout.
void CodeGenerator::VisitIfStatement(IfStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ IfStatement");
  // Generate different code depending on which parts of the if statement
  // are present or not.
  bool has_then_stm = node->HasThenStatement();
  bool has_else_stm = node->HasElseStatement();

  CodeForStatementPosition(node);

  JumpTarget exit;
  if (has_then_stm && has_else_stm) {
    Comment cmnt(masm_, "[ IfThenElse");
    JumpTarget then;
    JumpTarget else_;
    // if (cond)
    LoadCondition(node->condition(), &then, &else_, true);
    if (frame_ != NULL) {
      Branch(false, &else_);
    }
    // then
    if (frame_ != NULL || then.is_linked()) {
      then.Bind();
      Visit(node->then_statement());
    }
    if (frame_ != NULL) {
      exit.Jump();
    }
    // else
    if (else_.is_linked()) {
      else_.Bind();
      Visit(node->else_statement());
    }

  } else if (has_then_stm) {
    Comment cmnt(masm_, "[ IfThen");
    ASSERT(!has_else_stm);
    JumpTarget then;
    // if (cond)  -- a false condition falls out at 'exit' directly.
    LoadCondition(node->condition(), &then, &exit, true);
    if (frame_ != NULL) {
      Branch(false, &exit);
    }
    // then
    if (frame_ != NULL || then.is_linked()) {
      then.Bind();
      Visit(node->then_statement());
    }

  } else if (has_else_stm) {
    Comment cmnt(masm_, "[ IfElse");
    ASSERT(!has_then_stm);
    JumpTarget else_;
    // if (!cond)  -- a true condition goes straight to 'exit'.
    LoadCondition(node->condition(), &exit, &else_, true);
    if (frame_ != NULL) {
      Branch(true, &exit);
    }
    // else
    if (frame_ != NULL || else_.is_linked()) {
      else_.Bind();
      Visit(node->else_statement());
    }

  } else {
    Comment cmnt(masm_, "[ If");
    ASSERT(!has_then_stm && !has_else_stm);
    // if (cond)  -- the condition is evaluated only for its side effects.
    LoadCondition(node->condition(), &exit, &exit, false);
    if (frame_ != NULL) {
      if (has_cc()) {
        // Discard the pending condition-code result.
        cc_reg_ = al;
      } else {
        // Discard the materialized boolean value.
        frame_->Drop();
      }
    }
  }

  // end
  if (exit.is_linked()) {
    exit.Bind();
  }
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}
2127
2128
// Generate code for 'continue': jump to the continue target of the
// iteration statement this continue refers to.
void CodeGenerator::VisitContinueStatement(ContinueStatement* node) {
  Comment cmnt(masm_, "[ ContinueStatement");
  CodeForStatementPosition(node);
  node->target()->continue_target()->Jump();
}
2134
2135
// Generate code for 'break': jump to the break target of the statement
// this break refers to.
void CodeGenerator::VisitBreakStatement(BreakStatement* node) {
  Comment cmnt(masm_, "[ BreakStatement");
  CodeForStatementPosition(node);
  node->target()->break_target()->Jump();
}
2141
2142
2143void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002144 Comment cmnt(masm_, "[ ReturnStatement");
2145
2146 CodeForStatementPosition(node);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002147 Load(node->expression());
Iain Merrick75681382010-08-19 15:07:18 +01002148 frame_->PopToR0();
2149 frame_->PrepareForReturn();
Steve Blocka7e24c12009-10-30 11:49:00 +00002150 if (function_return_is_shadowed_) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002151 function_return_.Jump();
2152 } else {
2153 // Pop the result from the frame and prepare the frame for
2154 // returning thus making it easier to merge.
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01002155 if (function_return_.is_bound()) {
2156 // If the function return label is already bound we reuse the
2157 // code by jumping to the return site.
2158 function_return_.Jump();
2159 } else {
2160 function_return_.Bind();
2161 GenerateReturnSequence();
2162 }
2163 }
2164}
Steve Blocka7e24c12009-10-30 11:49:00 +00002165
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01002166
// Emit the function's return sequence: an optional trace-exit runtime
// call, frame teardown, stack-argument removal and the jump through lr.
// The teardown code is emitted inside a BlockConstPoolScope and its
// instruction count is checked against what the debugger expects.
void CodeGenerator::GenerateReturnSequence() {
  if (FLAG_trace) {
    // Push the return value on the stack as the parameter.
    // Runtime::TraceExit returns the parameter as it is.
    frame_->EmitPush(r0);
    frame_->CallRuntime(Runtime::kTraceExit, 1);
  }

#ifdef DEBUG
  // Add a label for checking the size of the code used for returning.
  Label check_exit_codesize;
  masm_->bind(&check_exit_codesize);
#endif
  // Make sure that the constant pool is not emitted inside of the return
  // sequence.
  { Assembler::BlockConstPoolScope block_const_pool(masm_);
    // Tear down the frame which will restore the caller's frame pointer and
    // the link register.
    frame_->Exit();

    // Here we use masm_-> instead of the __ macro to avoid the code coverage
    // tool from instrumenting as we rely on the code size here.
    // Pop the parameters plus the receiver from the caller's stack.
    int32_t sp_delta = (scope()->num_parameters() + 1) * kPointerSize;
    masm_->add(sp, sp, Operand(sp_delta));
    masm_->Jump(lr);
    DeleteFrame();

#ifdef DEBUG
    // Check that the size of the code used for returning matches what is
    // expected by the debugger. If the sp_delta above cannot be encoded in
    // the add instruction the add will generate two instructions.
    int return_sequence_length =
        masm_->InstructionsGeneratedSince(&check_exit_codesize);
    CHECK(return_sequence_length ==
          Assembler::kJSReturnSequenceInstructions ||
          return_sequence_length ==
          Assembler::kJSReturnSequenceInstructions + 1);
#endif
  }
}
2207
2208
// Compiles entry into a 'with' scope (also used for catch scopes):
// evaluates the object expression and calls the runtime to push a new
// context, then stores the new context (cp) into the frame's context slot.
void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ WithEnterStatement");
  CodeForStatementPosition(node);
  Load(node->expression());
  // Catch blocks get a catch context; everything else a plain 'with'
  // context.  Both runtime calls consume the pushed expression value.
  if (node->is_catch_block()) {
    frame_->CallRuntime(Runtime::kPushCatchContext, 1);
  } else {
    frame_->CallRuntime(Runtime::kPushContext, 1);
  }
#ifdef DEBUG
  // The runtime call is expected to both return the new context in r0
  // and install it in cp; verify that invariant in debug builds.
  JumpTarget verified_true;
  __ cmp(r0, cp);
  verified_true.Branch(eq);
  __ stop("PushContext: r0 is expected to be the same as cp");
  verified_true.Bind();
#endif
  // Update context local.
  __ str(cp, frame_->Context());
  ASSERT(frame_->height() == original_height);
}
2232
2233
// Compiles exit from a 'with' scope: restores cp to the previous context
// in the context chain and writes it back to the frame's context slot.
void CodeGenerator::VisitWithExitStatement(WithExitStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ WithExitStatement");
  CodeForStatementPosition(node);
  // Pop context.
  __ ldr(cp, ContextOperand(cp, Context::PREVIOUS_INDEX));
  // Update context local.
  __ str(cp, frame_->Context());
  ASSERT(frame_->height() == original_height);
}
2246
2247
// Compiles a switch statement as a linear sequence of equality tests
// against the tag, which is kept on top of the stack and duplicated
// before each comparison.  A default clause, if any, is compiled last,
// with default_entry/default_exit jump targets preserving source-order
// fall-through into and out of it.
void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ SwitchStatement");
  CodeForStatementPosition(node);
  node->break_target()->SetExpectedHeight();

  // Evaluate the switch tag; it stays on the frame for all case tests.
  Load(node->tag());

  JumpTarget next_test;
  JumpTarget fall_through;
  JumpTarget default_entry;
  JumpTarget default_exit(JumpTarget::BIDIRECTIONAL);
  ZoneList<CaseClause*>* cases = node->cases();
  int length = cases->length();
  CaseClause* default_clause = NULL;

  for (int i = 0; i < length; i++) {
    CaseClause* clause = cases->at(i);
    if (clause->is_default()) {
      // Remember the default clause and compile it at the end.
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case clause");
    // Compile the test.
    next_test.Bind();
    next_test.Unuse();
    // Duplicate TOS (the switch tag) so the comparison can consume it.
    frame_->Dup();
    Comparison(eq, NULL, clause->label(), true);
    Branch(false, &next_test);

    // Before entering the body from the test, remove the switch value from
    // the stack.
    frame_->Drop();

    // Label the body so that fall through is enabled.
    if (i > 0 && cases->at(i - 1)->is_default()) {
      // The previous (source-order) clause was the default; its body is
      // compiled later, so it falls into this body via default_exit.
      default_exit.Bind();
    } else {
      fall_through.Bind();
      fall_through.Unuse();
    }
    VisitStatements(clause->statements());

    // If control flow can fall through from the body, jump to the next body
    // or the end of the statement.
    if (frame_ != NULL) {
      if (i < length - 1 && cases->at(i + 1)->is_default()) {
        default_entry.Jump();
      } else {
        fall_through.Jump();
      }
    }
  }

  // The final "test" removes the switch value.
  next_test.Bind();
  frame_->Drop();

  // If there is a default clause, compile it.
  if (default_clause != NULL) {
    Comment cmnt(masm_, "[ Default clause");
    default_entry.Bind();
    VisitStatements(default_clause->statements());
    // If control flow can fall out of the default and there is a case after
    // it, jump to that case's body.
    if (frame_ != NULL && default_exit.is_bound()) {
      default_exit.Jump();
    }
  }

  if (fall_through.is_linked()) {
    fall_through.Bind();
  }

  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  node->break_target()->Unuse();
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}
2333
2334
// Compiles a do-while loop.  The condition is analyzed statically
// (AnalyzeCondition), so an always-true condition becomes an
// unconditional back edge and an always-false condition emits no test at
// all; only DONT_KNOW conditions compile the test expression.
void CodeGenerator::VisitDoWhileStatement(DoWhileStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ DoWhileStatement");
  CodeForStatementPosition(node);
  node->break_target()->SetExpectedHeight();
  JumpTarget body(JumpTarget::BIDIRECTIONAL);
  IncrementLoopNesting();

  // Label the top of the loop for the backward CFG edge.  If the test
  // is always true we can use the continue target, and if the test is
  // always false there is no need.
  ConditionAnalysis info = AnalyzeCondition(node->cond());
  switch (info) {
    case ALWAYS_TRUE:
      node->continue_target()->SetExpectedHeight();
      node->continue_target()->Bind();
      break;
    case ALWAYS_FALSE:
      node->continue_target()->SetExpectedHeight();
      break;
    case DONT_KNOW:
      node->continue_target()->SetExpectedHeight();
      body.Bind();
      break;
  }

  CheckStack();  // TODO(1222600): ignore if body contains calls.
  Visit(node->body());

  // Compile the test.
  switch (info) {
    case ALWAYS_TRUE:
      // If control can fall off the end of the body, jump back to the
      // top.
      if (has_valid_frame()) {
        node->continue_target()->Jump();
      }
      break;
    case ALWAYS_FALSE:
      // If we have a continue in the body, we only have to bind its
      // jump target.
      if (node->continue_target()->is_linked()) {
        node->continue_target()->Bind();
      }
      break;
    case DONT_KNOW:
      // We have to compile the test expression if it can be reached by
      // control flow falling out of the body or via continue.
      if (node->continue_target()->is_linked()) {
        node->continue_target()->Bind();
      }
      if (has_valid_frame()) {
        Comment cmnt(masm_, "[ DoWhileCondition");
        CodeForDoWhileConditionPosition(node);
        LoadCondition(node->cond(), &body, node->break_target(), true);
        if (has_valid_frame()) {
          // An invalid frame here would indicate that control did not
          // fall out of the test expression.
          Branch(true, &body);
        }
      }
      break;
  }

  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  DecrementLoopNesting();
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}
2407
2408
// Compiles a while loop.  An always-false condition compiles to nothing;
// an always-true condition skips emitting the test; otherwise the test is
// compiled at the top of the loop with the continue target as the
// backward edge.
void CodeGenerator::VisitWhileStatement(WhileStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ WhileStatement");
  CodeForStatementPosition(node);

  // If the test is never true and has no side effects there is no need
  // to compile the test or body.
  ConditionAnalysis info = AnalyzeCondition(node->cond());
  if (info == ALWAYS_FALSE) return;

  node->break_target()->SetExpectedHeight();
  IncrementLoopNesting();

  // Label the top of the loop with the continue target for the backward
  // CFG edge.
  node->continue_target()->SetExpectedHeight();
  node->continue_target()->Bind();

  if (info == DONT_KNOW) {
    JumpTarget body(JumpTarget::BIDIRECTIONAL);
    LoadCondition(node->cond(), &body, node->break_target(), true);
    if (has_valid_frame()) {
      // A NULL frame indicates that control did not fall out of the
      // test expression.
      Branch(false, node->break_target());
    }
    if (has_valid_frame() || body.is_linked()) {
      body.Bind();
    }
  }

  if (has_valid_frame()) {
    CheckStack();  // TODO(1222600): ignore if body contains calls.
    Visit(node->body());

    // If control flow can fall out of the body, jump back to the top.
    if (has_valid_frame()) {
      node->continue_target()->Jump();
    }
  }
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  DecrementLoopNesting();
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}
2457
2458
// Compiles a for loop: init, (optional) test, body, (optional) update.
// Condition analysis elides dead tests, and for "fast smi loops" the loop
// variable's static type info is set to Smi while compiling the body,
// update, and bottom condition check.
void CodeGenerator::VisitForStatement(ForStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ ForStatement");
  CodeForStatementPosition(node);
  if (node->init() != NULL) {
    Visit(node->init());
  }

  // If the test is never true there is no need to compile the test or
  // body.
  ConditionAnalysis info = AnalyzeCondition(node->cond());
  if (info == ALWAYS_FALSE) return;

  node->break_target()->SetExpectedHeight();
  IncrementLoopNesting();

  // We know that the loop index is a smi if it is not modified in the
  // loop body and it is checked against a constant limit in the loop
  // condition.  In this case, we reset the static type information of the
  // loop index to smi before compiling the body, the update expression, and
  // the bottom check of the loop condition.
  TypeInfoCodeGenState type_info_scope(this,
                                       node->is_fast_smi_loop() ?
                                           node->loop_variable()->AsSlot() :
                                           NULL,
                                       TypeInfo::Smi());

  // If there is no update statement, label the top of the loop with the
  // continue target, otherwise with the loop target.
  JumpTarget loop(JumpTarget::BIDIRECTIONAL);
  if (node->next() == NULL) {
    node->continue_target()->SetExpectedHeight();
    node->continue_target()->Bind();
  } else {
    node->continue_target()->SetExpectedHeight();
    loop.Bind();
  }

  // If the test is always true, there is no need to compile it.
  if (info == DONT_KNOW) {
    JumpTarget body;
    LoadCondition(node->cond(), &body, node->break_target(), true);
    if (has_valid_frame()) {
      Branch(false, node->break_target());
    }
    if (has_valid_frame() || body.is_linked()) {
      body.Bind();
    }
  }

  if (has_valid_frame()) {
    CheckStack();  // TODO(1222600): ignore if body contains calls.
    Visit(node->body());

    if (node->next() == NULL) {
      // If there is no update statement and control flow can fall out
      // of the loop, jump directly to the continue label.
      if (has_valid_frame()) {
        node->continue_target()->Jump();
      }
    } else {
      // If there is an update statement and control flow can reach it
      // via falling out of the body of the loop or continuing, we
      // compile the update statement.
      if (node->continue_target()->is_linked()) {
        node->continue_target()->Bind();
      }
      if (has_valid_frame()) {
        // Record source position of the statement as this code which is
        // after the code for the body actually belongs to the loop
        // statement and not the body.
        CodeForStatementPosition(node);
        Visit(node->next());
        loop.Jump();
      }
    }
  }
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  DecrementLoopNesting();
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}
2544
2545
// Compiles a for-in loop.  Fast path: validate the enumerable's enum
// cache entirely in generated code (mirroring the JSObject::IsSimpleEnum
// checks) and iterate the cached property names; otherwise fall back to
// Runtime::kGetPropertyNamesFast, which returns either a map (cache
// usable) or a FixedArray of names (slow path with per-key FILTER_KEY
// checks).  Loop state lives in five stack slots; see the layout comments
// below.  Register usage (r0-r4, ip) is hand-assigned and order-critical.
void CodeGenerator::VisitForInStatement(ForInStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ ForInStatement");
  CodeForStatementPosition(node);

  JumpTarget primitive;
  JumpTarget jsobject;
  JumpTarget fixed_array;
  JumpTarget entry(JumpTarget::BIDIRECTIONAL);
  JumpTarget end_del_check;
  JumpTarget exit;

  // Get the object to enumerate over (converted to JSObject).
  Load(node->enumerable());

  VirtualFrame::SpilledScope spilled_scope(frame_);
  // Both SpiderMonkey and kjs ignore null and undefined in contrast
  // to the specification.  12.6.4 mandates a call to ToObject.
  frame_->EmitPop(r0);
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, ip);
  exit.Branch(eq);
  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(r0, ip);
  exit.Branch(eq);

  // Stack layout in body:
  // [iteration counter (Smi)]
  // [length of array]
  // [FixedArray]
  // [Map or 0]
  // [Object]

  // Check if enumerable is already a JSObject.
  __ tst(r0, Operand(kSmiTagMask));
  primitive.Branch(eq);
  __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
  jsobject.Branch(hs);

  primitive.Bind();
  // Smis and other primitives are boxed via the TO_OBJECT builtin.
  frame_->EmitPush(r0);
  frame_->InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS, 1);

  jsobject.Bind();
  // Get the set of properties (as a FixedArray or Map).
  // r0: value to be iterated over
  frame_->EmitPush(r0);  // Push the object being iterated over.

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  JumpTarget call_runtime;
  JumpTarget loop(JumpTarget::BIDIRECTIONAL);
  JumpTarget check_prototype;
  JumpTarget use_cache;
  __ mov(r1, Operand(r0));
  loop.Bind();
  // This loop walks the prototype chain in r1, starting at the receiver.
  // Check that there are no elements.
  __ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
  __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex);
  __ cmp(r2, r4);
  call_runtime.Branch(ne);
  // Check that instance descriptors are not empty so that we can
  // check for an enum cache.  Leave the map in r3 for the subsequent
  // prototype load.
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldr(r2, FieldMemOperand(r3, Map::kInstanceDescriptorsOffset));
  __ LoadRoot(ip, Heap::kEmptyDescriptorArrayRootIndex);
  __ cmp(r2, ip);
  call_runtime.Branch(eq);
  // Check that there is an enum cache in the non-empty instance
  // descriptors.  This is the case if the next enumeration index
  // field does not contain a smi.
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumerationIndexOffset));
  __ tst(r2, Operand(kSmiTagMask));
  call_runtime.Branch(eq);
  // For all objects but the receiver, check that the cache is empty.
  // r4: empty fixed array root.
  __ cmp(r1, r0);
  check_prototype.Branch(eq);
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));
  __ cmp(r2, r4);
  call_runtime.Branch(ne);
  check_prototype.Bind();
  // Load the prototype from the map and loop if non-null.
  __ ldr(r1, FieldMemOperand(r3, Map::kPrototypeOffset));
  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(r1, ip);
  loop.Branch(ne);
  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
  use_cache.Jump();

  call_runtime.Bind();
  // Call the runtime to get the property names for the object.
  frame_->EmitPush(r0);  // push the object (slot 4) for the runtime call
  frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  // r0: map or fixed array (result from call to
  // Runtime::kGetPropertyNamesFast)
  __ mov(r2, Operand(r0));
  __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r1, ip);
  fixed_array.Branch(ne);

  use_cache.Bind();
  // Get enum cache.
  // r0: map (either the result from a call to
  // Runtime::kGetPropertyNamesFast or has been fetched directly from
  // the object)
  __ mov(r1, Operand(r0));
  __ ldr(r1, FieldMemOperand(r1, Map::kInstanceDescriptorsOffset));
  __ ldr(r1, FieldMemOperand(r1, DescriptorArray::kEnumerationIndexOffset));
  __ ldr(r2,
         FieldMemOperand(r1, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the five loop slots (see "Stack layout in body" above).
  frame_->EmitPush(r0);  // map
  frame_->EmitPush(r2);  // enum cache bridge cache
  __ ldr(r0, FieldMemOperand(r2, FixedArray::kLengthOffset));
  frame_->EmitPush(r0);
  __ mov(r0, Operand(Smi::FromInt(0)));
  frame_->EmitPush(r0);
  entry.Jump();

  fixed_array.Bind();
  __ mov(r1, Operand(Smi::FromInt(0)));
  frame_->EmitPush(r1);  // insert 0 in place of Map
  frame_->EmitPush(r0);

  // Push the length of the array and the initial index onto the stack.
  __ ldr(r0, FieldMemOperand(r0, FixedArray::kLengthOffset));
  frame_->EmitPush(r0);
  __ mov(r0, Operand(Smi::FromInt(0)));  // init index
  frame_->EmitPush(r0);

  // Condition.
  entry.Bind();
  // sp[0] : index
  // sp[1] : array/enum cache length
  // sp[2] : array or enum cache
  // sp[3] : 0 or map
  // sp[4] : enumerable
  // Grab the current frame's height for the break and continue
  // targets only after all the state is pushed on the frame.
  node->break_target()->SetExpectedHeight();
  node->continue_target()->SetExpectedHeight();

  // Load the current count to r0, load the length to r1.
  __ Ldrd(r0, r1, frame_->ElementAt(0));
  __ cmp(r0, r1);  // compare to the array length
  node->break_target()->Branch(hs);

  // Get the i'th entry of the array.
  __ ldr(r2, frame_->ElementAt(2));
  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r3, MemOperand(r2, r0, LSL, kPointerSizeLog2 - kSmiTagSize));

  // Get Map or 0.
  __ ldr(r2, frame_->ElementAt(3));
  // Check if this (still) matches the map of the enumerable.
  // If not, we have to filter the key.
  __ ldr(r1, frame_->ElementAt(4));
  __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r1, Operand(r2));
  end_del_check.Branch(eq);

  // Convert the entry to a string (or null if it isn't a property anymore).
  __ ldr(r0, frame_->ElementAt(4));  // push enumerable
  frame_->EmitPush(r0);
  frame_->EmitPush(r3);  // push entry
  frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_JS, 2);
  __ mov(r3, Operand(r0), SetCC);  // SetCC: eq flag set iff result is 0.
  // If the property has been removed while iterating, we just skip it.
  node->continue_target()->Branch(eq);

  end_del_check.Bind();
  // Store the entry in the 'each' expression and take another spin in the
  // loop.  r3: i'th entry of the enum cache (or string there of)
  frame_->EmitPush(r3);  // push entry
  { VirtualFrame::RegisterAllocationScope scope(this);
    Reference each(this, node->each());
    if (!each.is_illegal()) {
      if (each.size() > 0) {
        // Loading a reference may leave the frame in an unspilled state.
        frame_->SpillAll();  // Sync stack to memory.
        // Get the value (under the reference on the stack) from memory.
        __ ldr(r0, frame_->ElementAt(each.size()));
        frame_->EmitPush(r0);
        each.SetValue(NOT_CONST_INIT, UNLIKELY_SMI);
        frame_->Drop(2);  // The result of the set and the extra pushed value.
      } else {
        // If the reference was to a slot we rely on the convenient property
        // that it doesn't matter whether a value (eg, ebx pushed above) is
        // right on top of or right underneath a zero-sized reference.
        each.SetValue(NOT_CONST_INIT, UNLIKELY_SMI);
        frame_->Drop(1);  // Drop the result of the set operation.
      }
    }
  }
  // Body.
  CheckStack();  // TODO(1222600): ignore if body contains calls.
  { VirtualFrame::RegisterAllocationScope scope(this);
    Visit(node->body());
  }

  // Next.  Reestablish a spilled frame in case we are coming here via
  // a continue in the body.
  node->continue_target()->Bind();
  frame_->SpillAll();
  // Increment the (smi) index and loop back to the condition check.
  frame_->EmitPop(r0);
  __ add(r0, r0, Operand(Smi::FromInt(1)));
  frame_->EmitPush(r0);
  entry.Jump();

  // Cleanup.  No need to spill because VirtualFrame::Drop is safe for
  // any frame.
  node->break_target()->Bind();
  frame_->Drop(5);

  // Exit.
  exit.Bind();
  node->continue_target()->Unuse();
  node->break_target()->Unuse();
  ASSERT(frame_->height() == original_height);
}
2779
2780
// Compiles try/catch.  The catch block is emitted first (entered via a
// pushed handler when an exception is thrown; the exception arrives in
// r0).  All escapes from the try block -- break, continue, return --
// are "shadowed" so that the try handler can be unlinked from the
// handler chain before control leaves the protected region.
void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(frame_);
  Comment cmnt(masm_, "[ TryCatchStatement");
  CodeForStatementPosition(node);

  JumpTarget try_block;
  JumpTarget exit;

  // Call (not Jump) so the return address doubles as the handler's
  // resume address.
  try_block.Call();
  // --- Catch block ---
  frame_->EmitPush(r0);  // The thrown exception object.

  // Store the caught exception in the catch variable.
  Variable* catch_var = node->catch_var()->var();
  ASSERT(catch_var != NULL && catch_var->AsSlot() != NULL);
  StoreToSlot(catch_var->AsSlot(), NOT_CONST_INIT);

  // Remove the exception from the stack.
  frame_->Drop();

  { VirtualFrame::RegisterAllocationScope scope(this);
    VisitStatements(node->catch_block()->statements());
  }
  if (frame_ != NULL) {
    exit.Jump();
  }


  // --- Try block ---
  try_block.Bind();

  frame_->PushTryHandler(TRY_CATCH_HANDLER);
  int handler_height = frame_->height();

  // Shadow the labels for all escapes from the try block, including
  // returns.  During shadowing, the original label is hidden as the
  // LabelShadow and operations on the original actually affect the
  // shadowing label.
  //
  // We should probably try to unify the escaping labels and the return
  // label.
  int nof_escapes = node->escaping_targets()->length();
  List<ShadowTarget*> shadows(1 + nof_escapes);

  // Add the shadow target for the function return.
  static const int kReturnShadowIndex = 0;
  shadows.Add(new ShadowTarget(&function_return_));
  bool function_return_was_shadowed = function_return_is_shadowed_;
  function_return_is_shadowed_ = true;
  ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);

  // Add the remaining shadow targets.
  for (int i = 0; i < nof_escapes; i++) {
    shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
  }

  // Generate code for the statements in the try block.
  { VirtualFrame::RegisterAllocationScope scope(this);
    VisitStatements(node->try_block()->statements());
  }

  // Stop the introduced shadowing and count the number of required unlinks.
  // After shadowing stops, the original labels are unshadowed and the
  // LabelShadows represent the formerly shadowing labels.
  bool has_unlinks = false;
  for (int i = 0; i < shadows.length(); i++) {
    shadows[i]->StopShadowing();
    has_unlinks = has_unlinks || shadows[i]->is_linked();
  }
  function_return_is_shadowed_ = function_return_was_shadowed;

  // Get an external reference to the handler address.
  ExternalReference handler_address(Top::k_handler_address);

  // If we can fall off the end of the try block, unlink from try chain.
  if (has_valid_frame()) {
    // The next handler address is on top of the frame.  Unlink from
    // the handler list and drop the rest of this handler from the
    // frame.
    STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
    frame_->EmitPop(r1);  // r0 can contain the return value.
    __ mov(r3, Operand(handler_address));
    __ str(r1, MemOperand(r3));
    frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
    if (has_unlinks) {
      exit.Jump();
    }
  }

  // Generate unlink code for the (formerly) shadowing labels that have been
  // jumped to.  Deallocate each shadow target.
  for (int i = 0; i < shadows.length(); i++) {
    if (shadows[i]->is_linked()) {
      // Unlink from try chain.
      shadows[i]->Bind();
      // Because we can be jumping here (to spilled code) from unspilled
      // code, we need to reestablish a spilled frame at this block.
      frame_->SpillAll();

      // Reload sp from the top handler, because some statements that we
      // break from (eg, for...in) may have left stuff on the stack.
      __ mov(r3, Operand(handler_address));
      __ ldr(sp, MemOperand(r3));
      frame_->Forget(frame_->height() - handler_height);

      STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
      frame_->EmitPop(r1);  // r0 can contain the return value.
      __ str(r1, MemOperand(r3));
      frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

      if (!function_return_is_shadowed_ && i == kReturnShadowIndex) {
        frame_->PrepareForReturn();
      }
      // Continue to the originally intended destination.
      shadows[i]->other_target()->Jump();
    }
  }

  exit.Bind();
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}
2904
2905
// Generate code for a try/finally statement.  The generated code is a
// small state machine: r2 holds a smi state (FALLING, THROWING, or
// JUMPING + i) that tells the finally block how it was entered, and a
// (possibly faked) result value is kept on the stack just below the
// state while the finally code runs.  All escapes out of the try block
// (break, continue, return) are intercepted with shadow targets so
// that control always flows through the finally block first; after the
// finally block, control is re-dispatched on the state in r2.
void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(frame_);
  Comment cmnt(masm_, "[ TryFinallyStatement");
  CodeForStatementPosition(node);

  // State: Used to keep track of reason for entering the finally
  // block. Should probably be extended to hold information for
  // break/continue from within the try block.
  enum { FALLING, THROWING, JUMPING };

  JumpTarget try_block;
  JumpTarget finally_block;

  try_block.Call();

  // The code that follows the Call is reached on a thrown exception;
  // the thrown value is (presumably) in r0 — saved here, then we enter
  // the finally block with state THROWING.
  frame_->EmitPush(r0);  // save exception object on the stack
  // In case of thrown exceptions, this is where we continue.
  __ mov(r2, Operand(Smi::FromInt(THROWING)));
  finally_block.Jump();

  // --- Try block ---
  try_block.Bind();

  frame_->PushTryHandler(TRY_FINALLY_HANDLER);
  int handler_height = frame_->height();

  // Shadow the labels for all escapes from the try block, including
  // returns. Shadowing hides the original label as the LabelShadow and
  // operations on the original actually affect the shadowing label.
  //
  // We should probably try to unify the escaping labels and the return
  // label.
  int nof_escapes = node->escaping_targets()->length();
  List<ShadowTarget*> shadows(1 + nof_escapes);

  // Add the shadow target for the function return.
  static const int kReturnShadowIndex = 0;
  shadows.Add(new ShadowTarget(&function_return_));
  bool function_return_was_shadowed = function_return_is_shadowed_;
  function_return_is_shadowed_ = true;
  ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);

  // Add the remaining shadow targets.
  for (int i = 0; i < nof_escapes; i++) {
    shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
  }

  // Generate code for the statements in the try block.
  { VirtualFrame::RegisterAllocationScope scope(this);
    VisitStatements(node->try_block()->statements());
  }

  // Stop the introduced shadowing and count the number of required unlinks.
  // After shadowing stops, the original labels are unshadowed and the
  // LabelShadows represent the formerly shadowing labels.
  int nof_unlinks = 0;
  for (int i = 0; i < shadows.length(); i++) {
    shadows[i]->StopShadowing();
    if (shadows[i]->is_linked()) nof_unlinks++;
  }
  function_return_is_shadowed_ = function_return_was_shadowed;

  // Get an external reference to the handler address.
  ExternalReference handler_address(Top::k_handler_address);

  // If we can fall off the end of the try block, unlink from the try
  // chain and set the state on the frame to FALLING.
  if (has_valid_frame()) {
    // The next handler address is on top of the frame.
    STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
    frame_->EmitPop(r1);
    __ mov(r3, Operand(handler_address));
    __ str(r1, MemOperand(r3));
    frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

    // Fake a top of stack value (unneeded when FALLING) and set the
    // state in r2, then jump around the unlink blocks if any.
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    frame_->EmitPush(r0);
    __ mov(r2, Operand(Smi::FromInt(FALLING)));
    if (nof_unlinks > 0) {
      finally_block.Jump();
    }
  }

  // Generate code to unlink and set the state for the (formerly)
  // shadowing targets that have been jumped to.
  for (int i = 0; i < shadows.length(); i++) {
    if (shadows[i]->is_linked()) {
      // If we have come from the shadowed return, the return value is
      // in (a non-refcounted reference to) r0. We must preserve it
      // until it is pushed.
      //
      // Because we can be jumping here (to spilled code) from
      // unspilled code, we need to reestablish a spilled frame at
      // this block.
      shadows[i]->Bind();
      frame_->SpillAll();

      // Reload sp from the top handler, because some statements that
      // we break from (eg, for...in) may have left stuff on the
      // stack.
      __ mov(r3, Operand(handler_address));
      __ ldr(sp, MemOperand(r3));
      frame_->Forget(frame_->height() - handler_height);

      // Unlink this handler and drop it from the frame. The next
      // handler address is currently on top of the frame.
      STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
      frame_->EmitPop(r1);
      __ str(r1, MemOperand(r3));
      frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

      if (i == kReturnShadowIndex) {
        // If this label shadowed the function return, materialize the
        // return value on the stack.
        frame_->EmitPush(r0);
      } else {
        // Fake TOS for targets that shadowed breaks and continues.
        __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
        frame_->EmitPush(r0);
      }
      // Encode which escape target we came from in the state value.
      __ mov(r2, Operand(Smi::FromInt(JUMPING + i)));
      if (--nof_unlinks > 0) {
        // If this is not the last unlink block, jump around the next.
        finally_block.Jump();
      }
    }
  }

  // --- Finally block ---
  finally_block.Bind();

  // Push the state on the stack.
  frame_->EmitPush(r2);

  // We keep two elements on the stack - the (possibly faked) result
  // and the state - while evaluating the finally block.
  //
  // Generate code for the statements in the finally block.
  { VirtualFrame::RegisterAllocationScope scope(this);
    VisitStatements(node->finally_block()->statements());
  }

  if (has_valid_frame()) {
    // Restore state and return value or faked TOS.
    frame_->EmitPop(r2);
    frame_->EmitPop(r0);
  }

  // Generate code to jump to the right destination for all used
  // formerly shadowing targets. Deallocate each shadow target.
  for (int i = 0; i < shadows.length(); i++) {
    if (has_valid_frame() && shadows[i]->is_bound()) {
      JumpTarget* original = shadows[i]->other_target();
      __ cmp(r2, Operand(Smi::FromInt(JUMPING + i)));
      if (!function_return_is_shadowed_ && i == kReturnShadowIndex) {
        // The return has to go through PrepareForReturn, so it cannot
        // simply branch to the original target.
        JumpTarget skip;
        skip.Branch(ne);
        frame_->PrepareForReturn();
        original->Jump();
        skip.Bind();
      } else {
        original->Branch(eq);
      }
    }
  }

  if (has_valid_frame()) {
    // Check if we need to rethrow the exception.
    JumpTarget exit;
    __ cmp(r2, Operand(Smi::FromInt(THROWING)));
    exit.Branch(ne);

    // Rethrow exception.
    frame_->EmitPush(r0);
    frame_->CallRuntime(Runtime::kReThrow, 1);

    // Done.
    exit.Bind();
  }
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}
3092
3093
3094void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) {
3095#ifdef DEBUG
3096 int original_height = frame_->height();
3097#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003098 Comment cmnt(masm_, "[ DebuggerStatament");
3099 CodeForStatementPosition(node);
3100#ifdef ENABLE_DEBUGGER_SUPPORT
Andrei Popescu402d9372010-02-26 13:31:12 +00003101 frame_->DebugBreak();
Steve Blocka7e24c12009-10-30 11:49:00 +00003102#endif
3103 // Ignore the return value.
3104 ASSERT(frame_->height() == original_height);
3105}
3106
3107
Steve Block6ded16b2010-05-10 14:33:55 +01003108void CodeGenerator::InstantiateFunction(
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003109 Handle<SharedFunctionInfo> function_info,
3110 bool pretenure) {
Leon Clarkee46be812010-01-19 14:06:41 +00003111 // Use the fast case closure allocation code that allocates in new
3112 // space for nested functions that don't need literals cloning.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003113 if (scope()->is_function_scope() &&
3114 function_info->num_literals() == 0 &&
3115 !pretenure) {
Leon Clarkee46be812010-01-19 14:06:41 +00003116 FastNewClosureStub stub;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003117 frame_->EmitPush(Operand(function_info));
3118 frame_->SpillAll();
Leon Clarkee46be812010-01-19 14:06:41 +00003119 frame_->CallStub(&stub, 1);
3120 frame_->EmitPush(r0);
3121 } else {
3122 // Create a new closure.
3123 frame_->EmitPush(cp);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003124 frame_->EmitPush(Operand(function_info));
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003125 frame_->EmitPush(Operand(pretenure
3126 ? Factory::true_value()
3127 : Factory::false_value()));
3128 frame_->CallRuntime(Runtime::kNewClosure, 3);
Leon Clarkee46be812010-01-19 14:06:41 +00003129 frame_->EmitPush(r0);
3130 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003131}
3132
3133
3134void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
3135#ifdef DEBUG
3136 int original_height = frame_->height();
3137#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003138 Comment cmnt(masm_, "[ FunctionLiteral");
3139
Steve Block6ded16b2010-05-10 14:33:55 +01003140 // Build the function info and instantiate it.
3141 Handle<SharedFunctionInfo> function_info =
Ben Murdochf87a2032010-10-22 12:50:53 +01003142 Compiler::BuildFunctionInfo(node, script());
3143 if (function_info.is_null()) {
3144 SetStackOverflow();
Steve Blocka7e24c12009-10-30 11:49:00 +00003145 ASSERT(frame_->height() == original_height);
3146 return;
3147 }
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003148 InstantiateFunction(function_info, node->pretenure());
Steve Block6ded16b2010-05-10 14:33:55 +01003149 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003150}
3151
3152
// Generate code for a shared-function-info literal: instantiate a
// closure from the already-built SharedFunctionInfo and push it on the
// frame.  Such closures are never pretenured (hence the 'false').
void CodeGenerator::VisitSharedFunctionInfoLiteral(
    SharedFunctionInfoLiteral* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ SharedFunctionInfoLiteral");
  InstantiateFunction(node->shared_function_info(), false);
  // Exactly one value (the closure) was pushed.
  ASSERT_EQ(original_height + 1, frame_->height());
}
3162
3163
3164void CodeGenerator::VisitConditional(Conditional* node) {
3165#ifdef DEBUG
3166 int original_height = frame_->height();
3167#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003168 Comment cmnt(masm_, "[ Conditional");
3169 JumpTarget then;
3170 JumpTarget else_;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003171 LoadCondition(node->condition(), &then, &else_, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00003172 if (has_valid_frame()) {
3173 Branch(false, &else_);
3174 }
3175 if (has_valid_frame() || then.is_linked()) {
3176 then.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003177 Load(node->then_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00003178 }
3179 if (else_.is_linked()) {
3180 JumpTarget exit;
3181 if (has_valid_frame()) exit.Jump();
3182 else_.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003183 Load(node->else_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00003184 if (exit.is_linked()) exit.Bind();
3185 }
Steve Block6ded16b2010-05-10 14:33:55 +01003186 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003187}
3188
3189
// Load the value of a variable slot and push it on the virtual frame.
// LOOKUP slots (dynamically scoped variables) try an inline fast case
// first and fall back to a runtime call; all other slot kinds are read
// directly from their frame/context location.  Exactly one value is
// pushed in every case.
void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
  if (slot->type() == Slot::LOOKUP) {
    ASSERT(slot->var()->is_dynamic());

    // JumpTargets do not yet support merging frames so the frame must be
    // spilled when jumping to these targets.
    JumpTarget slow;
    JumpTarget done;

    // Generate fast case for loading from slots that correspond to
    // local/global variables or arguments unless they are shadowed by
    // eval-introduced bindings.
    EmitDynamicLoadFromSlotFastCase(slot,
                                    typeof_state,
                                    &slow,
                                    &done);

    // Slow case: call the runtime with the context and the variable
    // name.  Inside typeof, a missing variable must not raise a
    // reference error, hence the distinct runtime function.
    slow.Bind();
    frame_->EmitPush(cp);
    frame_->EmitPush(Operand(slot->var()->name()));

    if (typeof_state == INSIDE_TYPEOF) {
      frame_->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    } else {
      frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
    }

    // Both the fast and slow paths leave the result in r0.
    done.Bind();
    frame_->EmitPush(r0);

  } else {
    Register scratch = VirtualFrame::scratch0();
    // Push the slot's value along with any type info recorded for it.
    TypeInfo info = type_info(slot);
    frame_->EmitPush(SlotOperand(slot, scratch), info);

    if (slot->var()->mode() == Variable::CONST) {
      // Const slots may contain 'the hole' value (the constant hasn't been
      // initialized yet) which needs to be converted into the 'undefined'
      // value.  The conditional LoadRoot overwrites the value only when
      // the comparison against the hole succeeded.
      Comment cmnt(masm_, "[ Unhole const");
      Register tos = frame_->PopToRegister();
      __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
      __ cmp(tos, ip);
      __ LoadRoot(tos, Heap::kUndefinedValueRootIndex, eq);
      frame_->EmitPush(tos);
    }
  }
}
3238
3239
// Load a slot's value like LoadFromSlot, and additionally materialize
// the arguments object if the loaded value is the sentinel (the hole)
// left by lazy arguments allocation.  The loaded (or materialized)
// value stays on top of the virtual frame.
void CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot,
                                                  TypeofState state) {
  VirtualFrame::RegisterAllocationScope scope(this);
  LoadFromSlot(slot, state);

  // Bail out quickly if we're not using lazy arguments allocation.
  if (ArgumentsMode() != LAZY_ARGUMENTS_ALLOCATION) return;

  // ... or if the slot isn't a non-parameter arguments slot.
  if (slot->type() == Slot::PARAMETER || !slot->is_arguments()) return;

  // Load the loaded value from the stack into a register but leave it on the
  // stack.
  Register tos = frame_->Peek();

  // If the loaded value is the sentinel that indicates that we
  // haven't loaded the arguments object yet, we need to do it now.
  JumpTarget exit;
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(tos, ip);
  exit.Branch(ne);
  // Replace the sentinel with a freshly allocated arguments object.
  frame_->Drop();
  StoreArgumentsObject(false);
  exit.Bind();
}
3265
3266
// Store the value on top of the virtual frame into the given variable
// slot, leaving the (new) value on the frame afterwards (assignment
// expressions have a value).  LOOKUP slots go through the runtime;
// other slots are written directly, with a write barrier when storing
// a heap object into a context slot.  For CONST_INIT, the store only
// happens if the slot still holds the hole (first initialization).
void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
  ASSERT(slot != NULL);
  VirtualFrame::RegisterAllocationScope scope(this);
  if (slot->type() == Slot::LOOKUP) {
    ASSERT(slot->var()->is_dynamic());

    // For now, just do a runtime call.
    frame_->EmitPush(cp);
    frame_->EmitPush(Operand(slot->var()->name()));

    if (init_state == CONST_INIT) {
      // Same as the case for a normal store, but ignores attribute
      // (e.g. READ_ONLY) of context slot so that we can initialize
      // const properties (introduced via eval("const foo = (some
      // expr);")). Also, uses the current function context instead of
      // the top context.
      //
      // Note that we must declare the foo upon entry of eval(), via a
      // context slot declaration, but we cannot initialize it at the
      // same time, because the const declaration may be at the end of
      // the eval code (sigh...) and the const variable may have been
      // used before (where its value is 'undefined'). Thus, we can only
      // do the initialization when we actually encounter the expression
      // and when the expression operands are defined and valid, and
      // thus we need the split into 2 operations: declaration of the
      // context slot followed by initialization.
      frame_->CallRuntime(Runtime::kInitializeConstContextSlot, 3);
    } else {
      frame_->CallRuntime(Runtime::kStoreContextSlot, 3);
    }
    // Storing a variable must keep the (new) value on the expression
    // stack. This is necessary for compiling assignment expressions.
    frame_->EmitPush(r0);

  } else {
    ASSERT(!slot->var()->is_dynamic());
    Register scratch = VirtualFrame::scratch0();
    Register scratch2 = VirtualFrame::scratch1();

    // The frame must be spilled when branching to this target.
    JumpTarget exit;

    if (init_state == CONST_INIT) {
      ASSERT(slot->var()->mode() == Variable::CONST);
      // Only the first const initialization must be executed (the slot
      // still contains 'the hole' value). When the assignment is
      // executed, the code is identical to a normal store (see below).
      Comment cmnt(masm_, "[ Init const");
      __ ldr(scratch, SlotOperand(slot, scratch));
      __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
      __ cmp(scratch, ip);
      exit.Branch(ne);
    }

    // We must execute the store. Storing a variable must keep the
    // (new) value on the stack. This is necessary for compiling
    // assignment expressions.
    //
    // Note: We will reach here even with slot->var()->mode() ==
    // Variable::CONST because of const declarations which will
    // initialize consts to 'the hole' value and by doing so, end up
    // calling this code. r2 may be loaded with context; used below in
    // RecordWrite.
    Register tos = frame_->Peek();
    __ str(tos, SlotOperand(slot, scratch));
    if (slot->type() == Slot::CONTEXT) {
      // Skip write barrier if the written value is a smi.
      __ tst(tos, Operand(kSmiTagMask));
      // We don't use tos any more after here.
      exit.Branch(eq);
      // scratch is loaded with context when calling SlotOperand above.
      int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
      // We need an extra register. Until we have a way to do that in the
      // virtual frame we will cheat and ask for a free TOS register.
      Register scratch3 = frame_->GetTOSRegister();
      __ RecordWrite(scratch, Operand(offset), scratch2, scratch3);
    }
    // If we definitely did not jump over the assignment, we do not need
    // to bind the exit label. Doing so can defeat peephole
    // optimization.
    if (init_state == CONST_INIT || slot->type() == Slot::CONTEXT) {
      exit.Bind();
    }
  }
}
3352
3353
// Load a dynamically scoped global variable, after verifying that no
// context between the current scope and the global scope has been
// extended by eval-introduced bindings.  If any extension object is
// found, control branches to |slow|.  On the fast path the global
// object is loaded and a load IC is called; the caller is expected to
// pick up the result from r0.
void CodeGenerator::LoadFromGlobalSlotCheckExtensions(Slot* slot,
                                                      TypeofState typeof_state,
                                                      JumpTarget* slow) {
  // Check that no extension objects have been created by calls to
  // eval from the current scope to the global scope.
  Register tmp = frame_->scratch0();
  Register tmp2 = frame_->scratch1();
  Register context = cp;
  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        frame_->SpillAll();
        // Check that extension is NULL.
        __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX));
        __ tst(tmp2, tmp2);
        slow->Branch(ne);
      }
      // Load next context in chain.
      __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
      __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
      context = tmp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    // Inside an eval scope the static walk above is insufficient, so
    // emit a runtime loop that checks every context up to the global
    // context for an extension object.
    frame_->SpillAll();
    Label next, fast;
    __ Move(tmp, context);
    __ bind(&next);
    // Terminate at global context.
    __ ldr(tmp2, FieldMemOperand(tmp, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
    __ cmp(tmp2, ip);
    __ b(eq, &fast);
    // Check that extension is NULL.
    __ ldr(tmp2, ContextOperand(tmp, Context::EXTENSION_INDEX));
    __ tst(tmp2, tmp2);
    slow->Branch(ne);
    // Load next context in chain.
    __ ldr(tmp, ContextOperand(tmp, Context::CLOSURE_INDEX));
    __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
    __ b(&next);
    __ bind(&fast);
  }

  // Load the global object.
  LoadGlobal();
  // Setup the name register and call load IC.  Inside typeof the plain
  // CODE_TARGET reloc mode is used so that a missing global does not
  // raise a reference error.
  frame_->CallLoadIC(slot->var()->name(),
                     typeof_state == INSIDE_TYPEOF
                         ? RelocInfo::CODE_TARGET
                         : RelocInfo::CODE_TARGET_CONTEXT);
}
3412
3413
// Emit the fast paths for loading a dynamically scoped variable:
//  - DYNAMIC_GLOBAL: load through the global object after checking for
//    eval-introduced context extensions.
//  - DYNAMIC_LOCAL: load the shadowing local slot directly, or an
//    argument via the arguments object and a keyed load.
// On a successful fast path the result is left in r0 and control jumps
// to |done|.  On failure control reaches |slow|; if no fast case
// applies at all, this function emits nothing and execution falls
// through in the caller (which binds |slow| next).
void CodeGenerator::EmitDynamicLoadFromSlotFastCase(Slot* slot,
                                                    TypeofState typeof_state,
                                                    JumpTarget* slow,
                                                    JumpTarget* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
    LoadFromGlobalSlotCheckExtensions(slot, typeof_state, slow);
    frame_->SpillAll();
    done->Jump();

  } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
    frame_->SpillAll();
    Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot();
    Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
    if (potential_slot != NULL) {
      // Generate fast case for locals that rewrite to slots.
      __ ldr(r0,
             ContextSlotOperandCheckExtensions(potential_slot,
                                               r1,
                                               r2,
                                               slow));
      if (potential_slot->var()->mode() == Variable::CONST) {
        // Uninitialized consts hold the hole; convert it to undefined
        // with a conditional load.
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ cmp(r0, ip);
        __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
      }
      done->Jump();
    } else if (rewrite != NULL) {
      // Generate fast case for argument loads.
      Property* property = rewrite->AsProperty();
      if (property != NULL) {
        VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
        Literal* key_literal = property->key()->AsLiteral();
        if (obj_proxy != NULL &&
            key_literal != NULL &&
            obj_proxy->IsArguments() &&
            key_literal->handle()->IsSmi()) {
          // Load arguments object if there are no eval-introduced
          // variables. Then load the argument from the arguments
          // object using keyed load.
          __ ldr(r0,
                 ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
                                                   r1,
                                                   r2,
                                                   slow));
          frame_->EmitPush(r0);
          __ mov(r1, Operand(key_literal->handle()));
          frame_->EmitPush(r1);
          EmitKeyedLoad();
          done->Jump();
        }
      }
    }
  }
}
3473
3474
// Generate code for a slot reference (a non-global variable): load its
// value onto the virtual frame, materializing the arguments object on
// demand if needed.
void CodeGenerator::VisitSlot(Slot* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Slot");
  LoadFromSlotCheckForArguments(node, NOT_INSIDE_TYPEOF);
  // Exactly one value was pushed.
  ASSERT_EQ(original_height + 1, frame_->height());
}
3483
3484
3485void CodeGenerator::VisitVariableProxy(VariableProxy* node) {
3486#ifdef DEBUG
3487 int original_height = frame_->height();
3488#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003489 Comment cmnt(masm_, "[ VariableProxy");
3490
3491 Variable* var = node->var();
3492 Expression* expr = var->rewrite();
3493 if (expr != NULL) {
3494 Visit(expr);
3495 } else {
3496 ASSERT(var->is_global());
3497 Reference ref(this, node);
Steve Block6ded16b2010-05-10 14:33:55 +01003498 ref.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00003499 }
Steve Block6ded16b2010-05-10 14:33:55 +01003500 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003501}
3502
3503
3504void CodeGenerator::VisitLiteral(Literal* node) {
3505#ifdef DEBUG
3506 int original_height = frame_->height();
3507#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003508 Comment cmnt(masm_, "[ Literal");
Steve Block6ded16b2010-05-10 14:33:55 +01003509 Register reg = frame_->GetTOSRegister();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003510 bool is_smi = node->handle()->IsSmi();
Steve Block6ded16b2010-05-10 14:33:55 +01003511 __ mov(reg, Operand(node->handle()));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003512 frame_->EmitPush(reg, is_smi ? TypeInfo::Smi() : TypeInfo::Unknown());
Steve Block6ded16b2010-05-10 14:33:55 +01003513 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003514}
3515
3516
3517void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
3518#ifdef DEBUG
3519 int original_height = frame_->height();
3520#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003521 Comment cmnt(masm_, "[ RexExp Literal");
3522
Steve Block8defd9f2010-07-08 12:39:36 +01003523 Register tmp = VirtualFrame::scratch0();
3524 // Free up a TOS register that can be used to push the literal.
3525 Register literal = frame_->GetTOSRegister();
3526
Steve Blocka7e24c12009-10-30 11:49:00 +00003527 // Retrieve the literal array and check the allocated entry.
3528
3529 // Load the function of this activation.
Steve Block8defd9f2010-07-08 12:39:36 +01003530 __ ldr(tmp, frame_->Function());
Steve Blocka7e24c12009-10-30 11:49:00 +00003531
3532 // Load the literals array of the function.
Steve Block8defd9f2010-07-08 12:39:36 +01003533 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kLiteralsOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00003534
3535 // Load the literal at the ast saved index.
3536 int literal_offset =
3537 FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
Steve Block8defd9f2010-07-08 12:39:36 +01003538 __ ldr(literal, FieldMemOperand(tmp, literal_offset));
Steve Blocka7e24c12009-10-30 11:49:00 +00003539
Ben Murdochbb769b22010-08-11 14:56:33 +01003540 JumpTarget materialized;
Steve Blocka7e24c12009-10-30 11:49:00 +00003541 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
Steve Block8defd9f2010-07-08 12:39:36 +01003542 __ cmp(literal, ip);
3543 // This branch locks the virtual frame at the done label to match the
3544 // one we have here, where the literal register is not on the stack and
3545 // nothing is spilled.
Ben Murdochbb769b22010-08-11 14:56:33 +01003546 materialized.Branch(ne);
Steve Blocka7e24c12009-10-30 11:49:00 +00003547
Steve Block8defd9f2010-07-08 12:39:36 +01003548 // If the entry is undefined we call the runtime system to compute
Steve Blocka7e24c12009-10-30 11:49:00 +00003549 // the literal.
Steve Block8defd9f2010-07-08 12:39:36 +01003550 // literal array (0)
3551 frame_->EmitPush(tmp);
3552 // literal index (1)
3553 frame_->EmitPush(Operand(Smi::FromInt(node->literal_index())));
3554 // RegExp pattern (2)
3555 frame_->EmitPush(Operand(node->pattern()));
3556 // RegExp flags (3)
3557 frame_->EmitPush(Operand(node->flags()));
Steve Blocka7e24c12009-10-30 11:49:00 +00003558 frame_->CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
Steve Block8defd9f2010-07-08 12:39:36 +01003559 __ Move(literal, r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00003560
Ben Murdochbb769b22010-08-11 14:56:33 +01003561 materialized.Bind();
3562
Steve Block8defd9f2010-07-08 12:39:36 +01003563 frame_->EmitPush(literal);
Ben Murdochbb769b22010-08-11 14:56:33 +01003564 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
3565 frame_->EmitPush(Operand(Smi::FromInt(size)));
3566 frame_->CallRuntime(Runtime::kAllocateInNewSpace, 1);
3567 // TODO(lrn): Use AllocateInNewSpace macro with fallback to runtime.
3568 // r0 is newly allocated space.
3569
3570 // Reuse literal variable with (possibly) a new register, still holding
3571 // the materialized boilerplate.
3572 literal = frame_->PopToRegister(r0);
3573
3574 __ CopyFields(r0, literal, tmp.bit(), size / kPointerSize);
3575
3576 // Push the clone.
3577 frame_->EmitPush(r0);
Steve Block6ded16b2010-05-10 14:33:55 +01003578 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003579}
3580
3581
// Emits code that materializes an object literal: first clone the
// boilerplate via a runtime call, then emit stores for each property
// that is not already baked into the boilerplate. Leaves the resulting
// object on top of the virtual frame (net frame height +1).
void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ ObjectLiteral");

  Register literal = frame_->GetTOSRegister();
  // Load the function of this activation.
  __ ldr(literal, frame_->Function());
  // Literal array.
  __ ldr(literal, FieldMemOperand(literal, JSFunction::kLiteralsOffset));
  frame_->EmitPush(literal);
  // Literal index.
  frame_->EmitPush(Operand(Smi::FromInt(node->literal_index())));
  // Constant properties.
  frame_->EmitPush(Operand(node->constant_properties()));
  // Should the object literal have fast elements?
  frame_->EmitPush(Operand(Smi::FromInt(node->fast_elements() ? 1 : 0)));
  // Deeply nested literals must take the generic (deep-copying) runtime
  // path; shallow ones can use the cheaper shallow clone.
  if (node->depth() > 1) {
    frame_->CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    frame_->CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
  }
  frame_->EmitPush(r0);  // save the result

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  node->CalculateEmitStore();

  for (int i = 0; i < node->properties()->length(); i++) {
    // At the start of each iteration, the top of stack contains
    // the newly created object literal.
    ObjectLiteral::Property* property = node->properties()->at(i);
    Literal* key = property->key();
    Expression* value = property->value();
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        // Constant values are part of the boilerplate; nothing to emit.
        break;
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        // Compile-time values are also already in the boilerplate.
        if (CompileTimeValue::IsCompileTimeValue(property->value())) break;
        // else fall through
      case ObjectLiteral::Property::COMPUTED:
        if (key->handle()->IsSymbol()) {
          // Symbol keys use a named-store IC: value in r0, receiver in
          // r1, name in r2.
          Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
          Load(value);
          if (property->emit_store()) {
            frame_->PopToR0();
            // Fetch the object literal.
            frame_->SpillAllButCopyTOSToR1();
            __ mov(r2, Operand(key->handle()));
            frame_->CallCodeObject(ic, RelocInfo::CODE_TARGET, 0);
          } else {
            // Key is shadowed by a later duplicate; evaluate the value
            // for its side effects only.
            frame_->Drop();
          }
          break;
        }
        // else fall through
      case ObjectLiteral::Property::PROTOTYPE: {
        // Generic path: (object, key, value) -> Runtime::kSetProperty.
        frame_->Dup();
        Load(key);
        Load(value);
        if (property->emit_store()) {
          frame_->CallRuntime(Runtime::kSetProperty, 3);
        } else {
          frame_->Drop(3);
        }
        break;
      }
      case ObjectLiteral::Property::SETTER: {
        // (object, key, 1, function) -> Runtime::kDefineAccessor;
        // the Smi flag 1 selects setter definition.
        frame_->Dup();
        Load(key);
        frame_->EmitPush(Operand(Smi::FromInt(1)));
        Load(value);
        frame_->CallRuntime(Runtime::kDefineAccessor, 4);
        break;
      }
      case ObjectLiteral::Property::GETTER: {
        // Same as SETTER but with Smi flag 0 to define a getter.
        frame_->Dup();
        Load(key);
        frame_->EmitPush(Operand(Smi::FromInt(0)));
        Load(value);
        frame_->CallRuntime(Runtime::kDefineAccessor, 4);
        break;
      }
    }
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}
3671
3672
// Emits code that materializes an array literal: clone the boilerplate
// (via a stub when possible, else a runtime call), then store the
// non-constant element values directly into the clone's elements array.
// Leaves the array on top of the virtual frame (net frame height +1).
void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ ArrayLiteral");

  Register tos = frame_->GetTOSRegister();
  // Load the function of this activation.
  __ ldr(tos, frame_->Function());
  // Load the literals array of the function.
  __ ldr(tos, FieldMemOperand(tos, JSFunction::kLiteralsOffset));
  frame_->EmitPush(tos);
  frame_->EmitPush(Operand(Smi::FromInt(node->literal_index())));
  frame_->EmitPush(Operand(node->constant_elements()));
  int length = node->values()->length();
  // Choose the cheapest cloning strategy available for this literal.
  if (node->constant_elements()->map() == Heap::fixed_cow_array_map()) {
    // Copy-on-write elements: the stub can share the elements array.
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
    frame_->CallStub(&stub, 3);
    __ IncrementCounter(&Counters::cow_arrays_created_stub, 1, r1, r2);
  } else if (node->depth() > 1) {
    // Nested literals need the deep-copying runtime path.
    frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    // Too long for the stub; use the shallow runtime clone.
    frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
  } else {
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
    frame_->CallStub(&stub, 3);
  }
  frame_->EmitPush(r0);  // save the result
  // r0: created object literal

  // Generate code to set the elements in the array that are not
  // literals.
  for (int i = 0; i < node->values()->length(); i++) {
    Expression* value = node->values()->at(i);

    // If value is a literal the property value is already set in the
    // boilerplate object.
    if (value->AsLiteral() != NULL) continue;
    // If value is a materialized literal the property value is already set
    // in the boilerplate object if it is simple.
    if (CompileTimeValue::IsCompileTimeValue(value)) continue;

    // The property must be set by generated code.
    Load(value);
    frame_->PopToR0();
    // Fetch the object literal.
    frame_->SpillAllButCopyTOSToR1();

    // Get the elements array.
    __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));

    // Write to the indexed properties array.
    int offset = i * kPointerSize + FixedArray::kHeaderSize;
    __ str(r0, FieldMemOperand(r1, offset));

    // Update the write barrier for the array address.
    __ RecordWrite(r1, Operand(offset), r3, r2);
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}
3735
3736
// Emits code that allocates the catch extension object (the with-like
// scope object binding the catch variable to the caught exception) by
// pushing key and value and calling the runtime. Leaves the new object
// on top of the virtual frame (net frame height +1).
void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  // Call runtime routine to allocate the catch extension object and
  // assign the exception value to the catch variable.
  Comment cmnt(masm_, "[ CatchExtensionObject");
  Load(node->key());
  Load(node->value());
  frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2);
  frame_->EmitPush(r0);
  ASSERT_EQ(original_height + 1, frame_->height());
}
3750
3751
// Emits code for an assignment whose target is a (non-global) variable
// slot. Handles both plain assignments and compound assignments
// (e.g. 'x += y'), and leaves the assigned value on top of the virtual
// frame (net frame height +1).
void CodeGenerator::EmitSlotAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm(), "[ Variable Assignment");
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  ASSERT(var != NULL);
  Slot* slot = var->AsSlot();
  ASSERT(slot != NULL);

  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
    LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);

    // Perform the binary operation.
    Literal* literal = node->value()->AsLiteral();
    bool overwrite_value = node->value()->ResultOverwriteAllowed();
    if (literal != NULL && literal->handle()->IsSmi()) {
      // Smi literal operand: use the specialized Smi path.
      SmiOperation(node->binary_op(),
                   literal->handle(),
                   false,
                   overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
    } else {
      // Only emit the inlined Smi fast case inside loops; a non-Smi
      // literal operand makes the fast case useless.
      GenerateInlineSmi inline_smi =
          loop_nesting() > 0 ? GENERATE_INLINE_SMI : DONT_GENERATE_INLINE_SMI;
      if (literal != NULL) {
        ASSERT(!literal->handle()->IsSmi());
        inline_smi = DONT_GENERATE_INLINE_SMI;
      }
      Load(node->value());
      GenericBinaryOperation(node->binary_op(),
                             overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE,
                             inline_smi);
    }
  } else {
    Load(node->value());
  }

  // Perform the assignment. Assignments to an already-initialized
  // const are silently ignored (no store emitted).
  if (var->mode() != Variable::CONST || node->op() == Token::INIT_CONST) {
    CodeForSourcePosition(node->position());
    StoreToSlot(slot,
                node->op() == Token::INIT_CONST ? CONST_INIT : NOT_CONST_INIT);
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}
3800
3801
// Emits code for an assignment to a named property (obj.name = value)
// or to a global variable, which is treated as a named store on the
// global object. Handles compound assignments and the slow/fast
// property transitions around initialization blocks. Leaves the
// assigned value on top of the virtual frame (net frame height +1).
void CodeGenerator::EmitNamedPropertyAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm(), "[ Named Property Assignment");
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  Property* prop = node->target()->AsProperty();
  // The target is either a global variable or a named property, never both.
  ASSERT(var == NULL || (prop == NULL && var->is_global()));

  // Initialize name and evaluate the receiver sub-expression if necessary. If
  // the receiver is trivial it is not placed on the stack at this point, but
  // loaded whenever actually needed.
  Handle<String> name;
  bool is_trivial_receiver = false;
  if (var != NULL) {
    name = var->name();
  } else {
    Literal* lit = prop->key()->AsLiteral();
    ASSERT_NOT_NULL(lit);
    name = Handle<String>::cast(lit->handle());
    // Do not materialize the receiver on the frame if it is trivial.
    is_trivial_receiver = prop->obj()->IsTrivial();
    if (!is_trivial_receiver) Load(prop->obj());
  }

  // Change to slow case in the beginning of an initialization block to
  // avoid the quadratic behavior of repeatedly adding fast properties.
  if (node->starts_initialization_block()) {
    // Initialization block consists of assignments of the form expr.x = ..., so
    // this will never be an assignment to a variable, so there must be a
    // receiver object.
    ASSERT_EQ(NULL, var);
    if (is_trivial_receiver) {
      Load(prop->obj());
    } else {
      frame_->Dup();
    }
    frame_->CallRuntime(Runtime::kToSlowProperties, 1);
  }

  // Change to fast case at the end of an initialization block. To prepare for
  // that add an extra copy of the receiver to the frame, so that it can be
  // converted back to fast case after the assignment.
  if (node->ends_initialization_block() && !is_trivial_receiver) {
    frame_->Dup();
  }

  // Stack layout:
  // [tos]   : receiver (only materialized if non-trivial)
  // [tos+1] : receiver if at the end of an initialization block

  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
    if (is_trivial_receiver) {
      Load(prop->obj());
    } else if (var != NULL) {
      // Global variable: the receiver is the global object.
      LoadGlobal();
    } else {
      frame_->Dup();
    }
    EmitNamedLoad(name, var != NULL);

    // Perform the binary operation.
    Literal* literal = node->value()->AsLiteral();
    bool overwrite_value = node->value()->ResultOverwriteAllowed();
    if (literal != NULL && literal->handle()->IsSmi()) {
      // Smi literal operand: use the specialized Smi path.
      SmiOperation(node->binary_op(),
                   literal->handle(),
                   false,
                   overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
    } else {
      // Only emit the inlined Smi fast case inside loops; a non-Smi
      // literal operand makes the fast case useless.
      GenerateInlineSmi inline_smi =
          loop_nesting() > 0 ? GENERATE_INLINE_SMI : DONT_GENERATE_INLINE_SMI;
      if (literal != NULL) {
        ASSERT(!literal->handle()->IsSmi());
        inline_smi = DONT_GENERATE_INLINE_SMI;
      }
      Load(node->value());
      GenericBinaryOperation(node->binary_op(),
                             overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE,
                             inline_smi);
    }
  } else {
    // For non-compound assignment just load the right-hand side.
    Load(node->value());
  }

  // Stack layout:
  // [tos]   : value
  // [tos+1] : receiver (only materialized if non-trivial)
  // [tos+2] : receiver if at the end of an initialization block

  // Perform the assignment. It is safe to ignore constants here.
  ASSERT(var == NULL || var->mode() != Variable::CONST);
  ASSERT_NE(Token::INIT_CONST, node->op());
  if (is_trivial_receiver) {
    // Load the receiver and swap with the value.
    Load(prop->obj());
    Register t0 = frame_->PopToRegister();
    Register t1 = frame_->PopToRegister(t0);
    frame_->EmitPush(t0);
    frame_->EmitPush(t1);
  }
  CodeForSourcePosition(node->position());
  bool is_contextual = (var != NULL);
  EmitNamedStore(name, is_contextual);
  frame_->EmitPush(r0);

  // Change to fast case at the end of an initialization block.
  if (node->ends_initialization_block()) {
    ASSERT_EQ(NULL, var);
    // The argument to the runtime call is the receiver.
    if (is_trivial_receiver) {
      Load(prop->obj());
    } else {
      // A copy of the receiver is below the value of the assignment. Swap
      // the receiver and the value of the assignment expression.
      Register t0 = frame_->PopToRegister();
      Register t1 = frame_->PopToRegister(t0);
      frame_->EmitPush(t0);
      frame_->EmitPush(t1);
    }
    frame_->CallRuntime(Runtime::kToFastProperties, 1);
  }

  // Stack layout:
  // [tos] : result

  ASSERT_EQ(original_height + 1, frame_->height());
}
3934
3935
// Emits code for an assignment to a keyed property (obj[key] = value).
// Handles compound assignments, write-barrier hinting based on the
// likely type of the stored value, and the slow/fast property
// transitions around initialization blocks. Leaves the assigned value
// on top of the virtual frame (net frame height +1).
void CodeGenerator::EmitKeyedPropertyAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Keyed Property Assignment");
  Property* prop = node->target()->AsProperty();
  ASSERT_NOT_NULL(prop);

  // Evaluate the receiver subexpression.
  Load(prop->obj());

  // Hint passed to EmitKeyedStore so it can skip or specialize the
  // write barrier for the stored value.
  WriteBarrierCharacter wb_info;

  // Change to slow case in the beginning of an initialization block to
  // avoid the quadratic behavior of repeatedly adding fast properties.
  if (node->starts_initialization_block()) {
    frame_->Dup();
    frame_->CallRuntime(Runtime::kToSlowProperties, 1);
  }

  // Change to fast case at the end of an initialization block. To prepare for
  // that add an extra copy of the receiver to the frame, so that it can be
  // converted back to fast case after the assignment.
  if (node->ends_initialization_block()) {
    frame_->Dup();
  }

  // Evaluate the key subexpression.
  Load(prop->key());

  // Stack layout:
  // [tos]   : key
  // [tos+1] : receiver
  // [tos+2] : receiver if at the end of an initialization block
  //
  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
    // Duplicate receiver and key for loading the current property value.
    frame_->Dup2();
    EmitKeyedLoad();
    frame_->EmitPush(r0);

    // Perform the binary operation.
    Literal* literal = node->value()->AsLiteral();
    bool overwrite_value = node->value()->ResultOverwriteAllowed();
    if (literal != NULL && literal->handle()->IsSmi()) {
      // Smi literal operand: use the specialized Smi path.
      SmiOperation(node->binary_op(),
                   literal->handle(),
                   false,
                   overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
    } else {
      // Only emit the inlined Smi fast case inside loops; a non-Smi
      // literal operand makes the fast case useless.
      GenerateInlineSmi inline_smi =
          loop_nesting() > 0 ? GENERATE_INLINE_SMI : DONT_GENERATE_INLINE_SMI;
      if (literal != NULL) {
        ASSERT(!literal->handle()->IsSmi());
        inline_smi = DONT_GENERATE_INLINE_SMI;
      }
      Load(node->value());
      GenericBinaryOperation(node->binary_op(),
                             overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE,
                             inline_smi);
    }
    wb_info = node->type()->IsLikelySmi() ? LIKELY_SMI : UNLIKELY_SMI;
  } else {
    // For non-compound assignment just load the right-hand side.
    Load(node->value());
    // A literal value can never be a new-space object, so the write
    // barrier can be skipped entirely in that case.
    wb_info = node->value()->AsLiteral() != NULL ?
        NEVER_NEWSPACE :
        (node->value()->type()->IsLikelySmi() ? LIKELY_SMI : UNLIKELY_SMI);
  }

  // Stack layout:
  // [tos]   : value
  // [tos+1] : key
  // [tos+2] : receiver
  // [tos+3] : receiver if at the end of an initialization block

  // Perform the assignment. It is safe to ignore constants here.
  ASSERT(node->op() != Token::INIT_CONST);
  CodeForSourcePosition(node->position());
  EmitKeyedStore(prop->key()->type(), wb_info);
  frame_->EmitPush(r0);

  // Stack layout:
  // [tos]   : result
  // [tos+1] : receiver if at the end of an initialization block

  // Change to fast case at the end of an initialization block.
  if (node->ends_initialization_block()) {
    // The argument to the runtime call is the extra copy of the receiver,
    // which is below the value of the assignment. Swap the receiver and
    // the value of the assignment expression.
    Register t0 = frame_->PopToRegister();
    Register t1 = frame_->PopToRegister(t0);
    frame_->EmitPush(t1);
    frame_->EmitPush(t0);
    frame_->CallRuntime(Runtime::kToFastProperties, 1);
  }

  // Stack layout:
  // [tos] : result

  ASSERT_EQ(original_height + 1, frame_->height());
}
4042
4043
// Dispatches an assignment expression to the appropriate emitter based
// on the kind of target: local/context slot, named property (including
// globals), or keyed property. Invalid targets throw a reference error.
// Leaves the assigned value on top of the virtual frame (+1 height).
void CodeGenerator::VisitAssignment(Assignment* node) {
  VirtualFrame::RegisterAllocationScope scope(this);
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Assignment");

  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  Property* prop = node->target()->AsProperty();

  if (var != NULL && !var->is_global()) {
    // Local or context variable: stored via its slot.
    EmitSlotAssignment(node);

  } else if ((prop != NULL && prop->key()->IsPropertyName()) ||
             (var != NULL && var->is_global())) {
    // Properties whose keys are property names and global variables are
    // treated as named property references. We do not need to consider
    // global 'this' because it is not a valid left-hand side.
    EmitNamedPropertyAssignment(node);

  } else if (prop != NULL) {
    // Other properties (including rewritten parameters for a function that
    // uses arguments) are keyed property assignments.
    EmitKeyedPropertyAssignment(node);

  } else {
    // Invalid left-hand side.
    Load(node->target());
    frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
    // The runtime call doesn't actually return but the code generator will
    // still generate code and expects a certain frame height.
    frame_->EmitPush(r0);
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}
4079
4080
// Emits code for a 'throw' expression: evaluate the exception value and
// call the Runtime::kThrow entry. The runtime call unwinds rather than
// returning normally, but r0 is still pushed so the virtual frame keeps
// the height the expression context expects (+1).
void CodeGenerator::VisitThrow(Throw* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Throw");

  Load(node->exception());
  CodeForSourcePosition(node->position());
  frame_->CallRuntime(Runtime::kThrow, 1);
  frame_->EmitPush(r0);
  ASSERT_EQ(original_height + 1, frame_->height());
}
4093
4094
// Emits code for a property access expression (obj.name / obj[key]) by
// delegating to the Reference helper, which loads the property value
// onto the virtual frame (net frame height +1).
void CodeGenerator::VisitProperty(Property* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Property");

  { Reference property(this, node);
    property.GetValue();
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}
4106
4107
4108void CodeGenerator::VisitCall(Call* node) {
4109#ifdef DEBUG
4110 int original_height = frame_->height();
4111#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00004112 Comment cmnt(masm_, "[ Call");
4113
4114 Expression* function = node->expression();
4115 ZoneList<Expression*>* args = node->arguments();
4116
4117 // Standard function call.
4118 // Check if the function is a variable or a property.
4119 Variable* var = function->AsVariableProxy()->AsVariable();
4120 Property* property = function->AsProperty();
4121
4122 // ------------------------------------------------------------------------
4123 // Fast-case: Use inline caching.
4124 // ---
4125 // According to ECMA-262, section 11.2.3, page 44, the function to call
4126 // must be resolved after the arguments have been evaluated. The IC code
4127 // automatically handles this by loading the arguments before the function
4128 // is resolved in cache misses (this also holds for megamorphic calls).
4129 // ------------------------------------------------------------------------
4130
4131 if (var != NULL && var->is_possibly_eval()) {
4132 // ----------------------------------
4133 // JavaScript example: 'eval(arg)' // eval is not known to be shadowed
4134 // ----------------------------------
4135
4136 // In a call to eval, we first call %ResolvePossiblyDirectEval to
4137 // resolve the function we need to call and the receiver of the
4138 // call. Then we call the resolved function using the given
4139 // arguments.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004140
Steve Blocka7e24c12009-10-30 11:49:00 +00004141 // Prepare stack for call to resolved function.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004142 Load(function);
4143
4144 // Allocate a frame slot for the receiver.
Steve Block8defd9f2010-07-08 12:39:36 +01004145 frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004146
4147 // Load the arguments.
Steve Blocka7e24c12009-10-30 11:49:00 +00004148 int arg_count = args->length();
4149 for (int i = 0; i < arg_count; i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004150 Load(args->at(i));
Steve Blocka7e24c12009-10-30 11:49:00 +00004151 }
4152
Steve Block8defd9f2010-07-08 12:39:36 +01004153 VirtualFrame::SpilledScope spilled_scope(frame_);
4154
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004155 // If we know that eval can only be shadowed by eval-introduced
4156 // variables we attempt to load the global eval function directly
4157 // in generated code. If we succeed, there is no need to perform a
4158 // context lookup in the runtime system.
4159 JumpTarget done;
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004160 if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
4161 ASSERT(var->AsSlot()->type() == Slot::LOOKUP);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004162 JumpTarget slow;
4163 // Prepare the stack for the call to
4164 // ResolvePossiblyDirectEvalNoLookup by pushing the loaded
4165 // function, the first argument to the eval call and the
4166 // receiver.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004167 LoadFromGlobalSlotCheckExtensions(var->AsSlot(),
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004168 NOT_INSIDE_TYPEOF,
4169 &slow);
4170 frame_->EmitPush(r0);
4171 if (arg_count > 0) {
4172 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
4173 frame_->EmitPush(r1);
4174 } else {
4175 frame_->EmitPush(r2);
4176 }
4177 __ ldr(r1, frame_->Receiver());
4178 frame_->EmitPush(r1);
4179
4180 frame_->CallRuntime(Runtime::kResolvePossiblyDirectEvalNoLookup, 3);
4181
4182 done.Jump();
4183 slow.Bind();
4184 }
4185
4186 // Prepare the stack for the call to ResolvePossiblyDirectEval by
4187 // pushing the loaded function, the first argument to the eval
4188 // call and the receiver.
Steve Blocka7e24c12009-10-30 11:49:00 +00004189 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize + kPointerSize));
4190 frame_->EmitPush(r1);
4191 if (arg_count > 0) {
4192 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
4193 frame_->EmitPush(r1);
4194 } else {
4195 frame_->EmitPush(r2);
4196 }
Leon Clarkee46be812010-01-19 14:06:41 +00004197 __ ldr(r1, frame_->Receiver());
4198 frame_->EmitPush(r1);
4199
Steve Blocka7e24c12009-10-30 11:49:00 +00004200 // Resolve the call.
Leon Clarkee46be812010-01-19 14:06:41 +00004201 frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 3);
Steve Blocka7e24c12009-10-30 11:49:00 +00004202
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004203 // If we generated fast-case code bind the jump-target where fast
4204 // and slow case merge.
4205 if (done.is_linked()) done.Bind();
4206
Steve Blocka7e24c12009-10-30 11:49:00 +00004207 // Touch up stack with the right values for the function and the receiver.
Leon Clarkee46be812010-01-19 14:06:41 +00004208 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
Steve Blocka7e24c12009-10-30 11:49:00 +00004209 __ str(r1, MemOperand(sp, arg_count * kPointerSize));
4210
4211 // Call the function.
4212 CodeForSourcePosition(node->position());
4213
4214 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Leon Clarkee46be812010-01-19 14:06:41 +00004215 CallFunctionStub call_function(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
Steve Blocka7e24c12009-10-30 11:49:00 +00004216 frame_->CallStub(&call_function, arg_count + 1);
4217
4218 __ ldr(cp, frame_->Context());
4219 // Remove the function from the stack.
4220 frame_->Drop();
4221 frame_->EmitPush(r0);
4222
4223 } else if (var != NULL && !var->is_this() && var->is_global()) {
4224 // ----------------------------------
4225 // JavaScript example: 'foo(1, 2, 3)' // foo is global
4226 // ----------------------------------
Steve Blocka7e24c12009-10-30 11:49:00 +00004227 // Pass the global object as the receiver and let the IC stub
4228 // patch the stack to use the global proxy as 'this' in the
4229 // invoked function.
4230 LoadGlobal();
4231
4232 // Load the arguments.
4233 int arg_count = args->length();
4234 for (int i = 0; i < arg_count; i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004235 Load(args->at(i));
Steve Blocka7e24c12009-10-30 11:49:00 +00004236 }
4237
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004238 VirtualFrame::SpilledScope spilled_scope(frame_);
Andrei Popescu402d9372010-02-26 13:31:12 +00004239 // Setup the name register and call the IC initialization code.
4240 __ mov(r2, Operand(var->name()));
Steve Blocka7e24c12009-10-30 11:49:00 +00004241 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08004242 Handle<Code> stub = StubCache::ComputeCallInitialize(arg_count, in_loop);
Steve Blocka7e24c12009-10-30 11:49:00 +00004243 CodeForSourcePosition(node->position());
4244 frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET_CONTEXT,
4245 arg_count + 1);
4246 __ ldr(cp, frame_->Context());
Steve Blocka7e24c12009-10-30 11:49:00 +00004247 frame_->EmitPush(r0);
4248
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004249 } else if (var != NULL && var->AsSlot() != NULL &&
4250 var->AsSlot()->type() == Slot::LOOKUP) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004251 // ----------------------------------
Kristian Monsen25f61362010-05-21 11:50:48 +01004252 // JavaScript examples:
4253 //
4254 // with (obj) foo(1, 2, 3) // foo may be in obj.
4255 //
4256 // function f() {};
4257 // function g() {
4258 // eval(...);
4259 // f(); // f could be in extension object.
4260 // }
Steve Blocka7e24c12009-10-30 11:49:00 +00004261 // ----------------------------------
4262
Kristian Monsen25f61362010-05-21 11:50:48 +01004263 JumpTarget slow, done;
4264
4265 // Generate fast case for loading functions from slots that
4266 // correspond to local/global variables or arguments unless they
4267 // are shadowed by eval-introduced bindings.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004268 EmitDynamicLoadFromSlotFastCase(var->AsSlot(),
Kristian Monsen25f61362010-05-21 11:50:48 +01004269 NOT_INSIDE_TYPEOF,
4270 &slow,
4271 &done);
4272
4273 slow.Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00004274 // Load the function
4275 frame_->EmitPush(cp);
Iain Merrick75681382010-08-19 15:07:18 +01004276 frame_->EmitPush(Operand(var->name()));
Steve Blocka7e24c12009-10-30 11:49:00 +00004277 frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
4278 // r0: slot value; r1: receiver
4279
4280 // Load the receiver.
4281 frame_->EmitPush(r0); // function
4282 frame_->EmitPush(r1); // receiver
4283
Kristian Monsen25f61362010-05-21 11:50:48 +01004284 // If fast case code has been generated, emit code to push the
4285 // function and receiver and have the slow path jump around this
4286 // code.
4287 if (done.is_linked()) {
4288 JumpTarget call;
4289 call.Jump();
4290 done.Bind();
4291 frame_->EmitPush(r0); // function
Iain Merrick75681382010-08-19 15:07:18 +01004292 LoadGlobalReceiver(VirtualFrame::scratch0()); // receiver
Kristian Monsen25f61362010-05-21 11:50:48 +01004293 call.Bind();
4294 }
4295
4296 // Call the function. At this point, everything is spilled but the
4297 // function and receiver are in r0 and r1.
Leon Clarkee46be812010-01-19 14:06:41 +00004298 CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
Steve Blocka7e24c12009-10-30 11:49:00 +00004299 frame_->EmitPush(r0);
4300
4301 } else if (property != NULL) {
4302 // Check if the key is a literal string.
4303 Literal* literal = property->key()->AsLiteral();
4304
4305 if (literal != NULL && literal->handle()->IsSymbol()) {
4306 // ------------------------------------------------------------------
4307 // JavaScript example: 'object.foo(1, 2, 3)' or 'map["key"](1, 2, 3)'
4308 // ------------------------------------------------------------------
4309
Steve Block6ded16b2010-05-10 14:33:55 +01004310 Handle<String> name = Handle<String>::cast(literal->handle());
Steve Blocka7e24c12009-10-30 11:49:00 +00004311
Steve Block6ded16b2010-05-10 14:33:55 +01004312 if (ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION &&
4313 name->IsEqualTo(CStrVector("apply")) &&
4314 args->length() == 2 &&
4315 args->at(1)->AsVariableProxy() != NULL &&
4316 args->at(1)->AsVariableProxy()->IsArguments()) {
4317 // Use the optimized Function.prototype.apply that avoids
4318 // allocating lazily allocated arguments objects.
4319 CallApplyLazy(property->obj(),
4320 args->at(0),
4321 args->at(1)->AsVariableProxy(),
4322 node->position());
4323
4324 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004325 Load(property->obj()); // Receiver.
Steve Block6ded16b2010-05-10 14:33:55 +01004326 // Load the arguments.
4327 int arg_count = args->length();
4328 for (int i = 0; i < arg_count; i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004329 Load(args->at(i));
Steve Block6ded16b2010-05-10 14:33:55 +01004330 }
4331
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004332 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Block6ded16b2010-05-10 14:33:55 +01004333 // Set the name register and call the IC initialization code.
4334 __ mov(r2, Operand(name));
4335 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08004336 Handle<Code> stub =
4337 StubCache::ComputeCallInitialize(arg_count, in_loop);
Steve Block6ded16b2010-05-10 14:33:55 +01004338 CodeForSourcePosition(node->position());
4339 frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
4340 __ ldr(cp, frame_->Context());
4341 frame_->EmitPush(r0);
4342 }
Steve Blocka7e24c12009-10-30 11:49:00 +00004343
4344 } else {
4345 // -------------------------------------------
4346 // JavaScript example: 'array[index](1, 2, 3)'
4347 // -------------------------------------------
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08004348
4349 // Load the receiver and name of the function.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004350 Load(property->obj());
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08004351 Load(property->key());
4352
Steve Blocka7e24c12009-10-30 11:49:00 +00004353 if (property->is_synthetic()) {
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01004354 EmitKeyedLoad();
4355 // Put the function below the receiver.
Leon Clarked91b9f72010-01-27 17:25:45 +00004356 // Use the global receiver.
Kristian Monsen25f61362010-05-21 11:50:48 +01004357 frame_->EmitPush(r0); // Function.
Iain Merrick75681382010-08-19 15:07:18 +01004358 LoadGlobalReceiver(VirtualFrame::scratch0());
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01004359 // Call the function.
4360 CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position());
4361 frame_->EmitPush(r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00004362 } else {
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08004363 // Swap the name of the function and the receiver on the stack to follow
4364 // the calling convention for call ICs.
4365 Register key = frame_->PopToRegister();
4366 Register receiver = frame_->PopToRegister(key);
4367 frame_->EmitPush(key);
4368 frame_->EmitPush(receiver);
4369
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01004370 // Load the arguments.
4371 int arg_count = args->length();
4372 for (int i = 0; i < arg_count; i++) {
4373 Load(args->at(i));
4374 }
Steve Blocka7e24c12009-10-30 11:49:00 +00004375
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08004376 // Load the key into r2 and call the IC initialization code.
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01004377 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08004378 Handle<Code> stub =
4379 StubCache::ComputeKeyedCallInitialize(arg_count, in_loop);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01004380 CodeForSourcePosition(node->position());
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08004381 frame_->SpillAll();
4382 __ ldr(r2, frame_->ElementAt(arg_count + 1));
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01004383 frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08004384 frame_->Drop(); // Drop the key still on the stack.
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01004385 __ ldr(cp, frame_->Context());
4386 frame_->EmitPush(r0);
4387 }
Steve Blocka7e24c12009-10-30 11:49:00 +00004388 }
4389
4390 } else {
4391 // ----------------------------------
4392 // JavaScript example: 'foo(1, 2, 3)' // foo is not global
4393 // ----------------------------------
4394
4395 // Load the function.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004396 Load(function);
4397
Steve Blocka7e24c12009-10-30 11:49:00 +00004398 // Pass the global proxy as the receiver.
Iain Merrick75681382010-08-19 15:07:18 +01004399 LoadGlobalReceiver(VirtualFrame::scratch0());
Steve Blocka7e24c12009-10-30 11:49:00 +00004400
4401 // Call the function.
Leon Clarkee46be812010-01-19 14:06:41 +00004402 CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
Steve Blocka7e24c12009-10-30 11:49:00 +00004403 frame_->EmitPush(r0);
4404 }
Steve Block6ded16b2010-05-10 14:33:55 +01004405 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00004406}
4407
4408
// Emits code for a 'new' expression.  Evaluates the constructor
// expression and the arguments, then calls the JSConstructCall builtin,
// which performs allocation and constructor invocation.  The result
// (in r0) is pushed on the virtual frame.
void CodeGenerator::VisitCallNew(CallNew* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ CallNew");

  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments. This is different from ordinary calls, where the
  // actual function to call is resolved after the arguments have been
  // evaluated.

  // Push constructor on the stack.  If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  Load(node->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = node->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    Load(args->at(i));
  }

  // Spill everything from here to simplify the implementation.
  VirtualFrame::SpilledScope spilled_scope(frame_);

  // Load the argument count into r0 and the function into r1 as per
  // calling convention.  The constructor sits below the arguments on
  // the frame, hence ElementAt(arg_count).
  __ mov(r0, Operand(arg_count));
  __ ldr(r1, frame_->ElementAt(arg_count));

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  CodeForSourcePosition(node->position());
  Handle<Code> ic(Builtins::builtin(Builtins::JSConstructCall));
  // arg_count + 1 accounts for the constructor itself being dropped
  // from the frame along with the arguments.
  frame_->CallCodeObject(ic, RelocInfo::CONSTRUCT_CALL, arg_count + 1);
  frame_->EmitPush(r0);

  // The call replaced constructor + arguments by a single result.
  ASSERT_EQ(original_height + 1, frame_->height());
}
4450
4451
// Emits code that pushes the "class name" of the single argument:
// 'Function' for JS functions, the constructor's instance class name
// for other JS objects, 'Object' when the object's constructor is not
// a function, and null for smis and non-JS objects.
void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
  Register scratch = VirtualFrame::scratch0();
  JumpTarget null, function, leave, non_function_constructor;

  // Load the object into register.
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Register tos = frame_->PopToRegister();

  // If the object is a smi, we return null.
  __ tst(tos, Operand(kSmiTagMask));
  null.Branch(eq);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  // Note: this overwrites tos with the object's map.
  __ CompareObjectType(tos, tos, scratch, FIRST_JS_OBJECT_TYPE);
  null.Branch(lt);

  // As long as JS_FUNCTION_TYPE is the last instance type and it is
  // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
  // LAST_JS_OBJECT_TYPE.
  STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
  STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
  __ cmp(scratch, Operand(JS_FUNCTION_TYPE));
  function.Branch(eq);

  // Check if the constructor in the map is a function.
  // (tos still holds the map from the CompareObjectType above.)
  __ ldr(tos, FieldMemOperand(tos, Map::kConstructorOffset));
  __ CompareObjectType(tos, scratch, scratch, JS_FUNCTION_TYPE);
  non_function_constructor.Branch(ne);

  // The tos register now contains the constructor function. Grab the
  // instance class name from there.
  __ ldr(tos, FieldMemOperand(tos, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(tos,
         FieldMemOperand(tos, SharedFunctionInfo::kInstanceClassNameOffset));
  frame_->EmitPush(tos);
  leave.Jump();

  // Functions have class 'Function'.
  function.Bind();
  __ mov(tos, Operand(Factory::function_class_symbol()));
  frame_->EmitPush(tos);
  leave.Jump();

  // Objects with a non-function constructor have class 'Object'.
  non_function_constructor.Bind();
  __ mov(tos, Operand(Factory::Object_symbol()));
  frame_->EmitPush(tos);
  leave.Jump();

  // Non-JS objects have class null.
  null.Bind();
  __ LoadRoot(tos, Heap::kNullValueRootIndex);
  frame_->EmitPush(tos);

  // All done.
  leave.Bind();
}
4511
4512
4513void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
Iain Merrick75681382010-08-19 15:07:18 +01004514 Register scratch = VirtualFrame::scratch0();
Steve Blocka7e24c12009-10-30 11:49:00 +00004515 JumpTarget leave;
Iain Merrick75681382010-08-19 15:07:18 +01004516
4517 ASSERT(args->length() == 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004518 Load(args->at(0));
Iain Merrick75681382010-08-19 15:07:18 +01004519 Register tos = frame_->PopToRegister(); // tos contains object.
Steve Blocka7e24c12009-10-30 11:49:00 +00004520 // if (object->IsSmi()) return the object.
Iain Merrick75681382010-08-19 15:07:18 +01004521 __ tst(tos, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00004522 leave.Branch(eq);
4523 // It is a heap object - get map. If (!object->IsJSValue()) return the object.
Iain Merrick75681382010-08-19 15:07:18 +01004524 __ CompareObjectType(tos, scratch, scratch, JS_VALUE_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00004525 leave.Branch(ne);
4526 // Load the value.
Iain Merrick75681382010-08-19 15:07:18 +01004527 __ ldr(tos, FieldMemOperand(tos, JSValue::kValueOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00004528 leave.Bind();
Iain Merrick75681382010-08-19 15:07:18 +01004529 frame_->EmitPush(tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00004530}
4531
4532
4533void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* args) {
Iain Merrick75681382010-08-19 15:07:18 +01004534 Register scratch1 = VirtualFrame::scratch0();
4535 Register scratch2 = VirtualFrame::scratch1();
Steve Blocka7e24c12009-10-30 11:49:00 +00004536 JumpTarget leave;
Iain Merrick75681382010-08-19 15:07:18 +01004537
4538 ASSERT(args->length() == 2);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004539 Load(args->at(0)); // Load the object.
4540 Load(args->at(1)); // Load the value.
Iain Merrick75681382010-08-19 15:07:18 +01004541 Register value = frame_->PopToRegister();
4542 Register object = frame_->PopToRegister(value);
Steve Blocka7e24c12009-10-30 11:49:00 +00004543 // if (object->IsSmi()) return object.
Iain Merrick75681382010-08-19 15:07:18 +01004544 __ tst(object, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00004545 leave.Branch(eq);
4546 // It is a heap object - get map. If (!object->IsJSValue()) return the object.
Iain Merrick75681382010-08-19 15:07:18 +01004547 __ CompareObjectType(object, scratch1, scratch1, JS_VALUE_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00004548 leave.Branch(ne);
4549 // Store the value.
Iain Merrick75681382010-08-19 15:07:18 +01004550 __ str(value, FieldMemOperand(object, JSValue::kValueOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00004551 // Update the write barrier.
Iain Merrick75681382010-08-19 15:07:18 +01004552 __ RecordWrite(object,
4553 Operand(JSValue::kValueOffset - kHeapObjectTag),
4554 scratch1,
4555 scratch2);
Steve Blocka7e24c12009-10-30 11:49:00 +00004556 // Leave.
4557 leave.Bind();
Iain Merrick75681382010-08-19 15:07:18 +01004558 frame_->EmitPush(value);
Steve Blocka7e24c12009-10-30 11:49:00 +00004559}
4560
4561
4562void CodeGenerator::GenerateIsSmi(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004563 ASSERT(args->length() == 1);
Leon Clarkef7060e22010-06-03 12:02:55 +01004564 Load(args->at(0));
4565 Register reg = frame_->PopToRegister();
4566 __ tst(reg, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00004567 cc_reg_ = eq;
4568}
4569
4570
// Emits code for the logging intrinsic.  When logging/profiling support
// is compiled in and enabled for this call site, the second and third
// arguments are passed to Runtime::kLog; the expression result is
// always 'undefined'.
void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) {
  // See comment in CodeGenerator::GenerateLog in codegen-ia32.cc.
  ASSERT_EQ(args->length(), 3);
#ifdef ENABLE_LOGGING_AND_PROFILING
  // The first argument decides (at compile time) whether logging code
  // is emitted at all; see ShouldGenerateLog.
  if (ShouldGenerateLog(args->at(0))) {
    Load(args->at(1));
    Load(args->at(2));
    frame_->CallRuntime(Runtime::kLog, 2);
  }
#endif
  // Regardless of whether logging happened, the result is undefined.
  frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
}
4583
4584
4585void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004586 ASSERT(args->length() == 1);
Leon Clarkef7060e22010-06-03 12:02:55 +01004587 Load(args->at(0));
4588 Register reg = frame_->PopToRegister();
4589 __ tst(reg, Operand(kSmiTagMask | 0x80000000u));
Steve Blocka7e24c12009-10-30 11:49:00 +00004590 cc_reg_ = eq;
4591}
4592
4593
Steve Block8defd9f2010-07-08 12:39:36 +01004594// Generates the Math.pow method.
Steve Block6ded16b2010-05-10 14:33:55 +01004595void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
4596 ASSERT(args->length() == 2);
4597 Load(args->at(0));
4598 Load(args->at(1));
Steve Block8defd9f2010-07-08 12:39:36 +01004599
4600 if (!CpuFeatures::IsSupported(VFP3)) {
4601 frame_->CallRuntime(Runtime::kMath_pow, 2);
4602 frame_->EmitPush(r0);
4603 } else {
4604 CpuFeatures::Scope scope(VFP3);
4605 JumpTarget runtime, done;
4606 Label exponent_nonsmi, base_nonsmi, powi, not_minus_half, allocate_return;
4607
4608 Register scratch1 = VirtualFrame::scratch0();
4609 Register scratch2 = VirtualFrame::scratch1();
4610
4611 // Get base and exponent to registers.
4612 Register exponent = frame_->PopToRegister();
4613 Register base = frame_->PopToRegister(exponent);
4614 Register heap_number_map = no_reg;
4615
4616 // Set the frame for the runtime jump target. The code below jumps to the
4617 // jump target label so the frame needs to be established before that.
4618 ASSERT(runtime.entry_frame() == NULL);
4619 runtime.set_entry_frame(frame_);
4620
4621 __ BranchOnNotSmi(exponent, &exponent_nonsmi);
4622 __ BranchOnNotSmi(base, &base_nonsmi);
4623
4624 heap_number_map = r6;
4625 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
4626
4627 // Exponent is a smi and base is a smi. Get the smi value into vfp register
4628 // d1.
4629 __ SmiToDoubleVFPRegister(base, d1, scratch1, s0);
4630 __ b(&powi);
4631
4632 __ bind(&base_nonsmi);
4633 // Exponent is smi and base is non smi. Get the double value from the base
4634 // into vfp register d1.
4635 __ ObjectToDoubleVFPRegister(base, d1,
4636 scratch1, scratch2, heap_number_map, s0,
4637 runtime.entry_label());
4638
4639 __ bind(&powi);
4640
4641 // Load 1.0 into d0.
Ben Murdoch3bec4d22010-07-22 14:51:16 +01004642 __ vmov(d0, 1.0);
Steve Block8defd9f2010-07-08 12:39:36 +01004643
4644 // Get the absolute untagged value of the exponent and use that for the
4645 // calculation.
4646 __ mov(scratch1, Operand(exponent, ASR, kSmiTagSize), SetCC);
Iain Merrick9ac36c92010-09-13 15:29:50 +01004647 // Negate if negative.
4648 __ rsb(scratch1, scratch1, Operand(0, RelocInfo::NONE), LeaveCC, mi);
Steve Block8defd9f2010-07-08 12:39:36 +01004649 __ vmov(d2, d0, mi); // 1.0 needed in d2 later if exponent is negative.
4650
4651 // Run through all the bits in the exponent. The result is calculated in d0
4652 // and d1 holds base^(bit^2).
4653 Label more_bits;
4654 __ bind(&more_bits);
4655 __ mov(scratch1, Operand(scratch1, LSR, 1), SetCC);
4656 __ vmul(d0, d0, d1, cs); // Multiply with base^(bit^2) if bit is set.
4657 __ vmul(d1, d1, d1, ne); // Don't bother calculating next d1 if done.
4658 __ b(ne, &more_bits);
4659
4660 // If exponent is positive we are done.
Iain Merrick9ac36c92010-09-13 15:29:50 +01004661 __ cmp(exponent, Operand(0, RelocInfo::NONE));
Steve Block8defd9f2010-07-08 12:39:36 +01004662 __ b(ge, &allocate_return);
4663
4664 // If exponent is negative result is 1/result (d2 already holds 1.0 in that
4665 // case). However if d0 has reached infinity this will not provide the
4666 // correct result, so call runtime if that is the case.
4667 __ mov(scratch2, Operand(0x7FF00000));
Iain Merrick9ac36c92010-09-13 15:29:50 +01004668 __ mov(scratch1, Operand(0, RelocInfo::NONE));
Steve Block8defd9f2010-07-08 12:39:36 +01004669 __ vmov(d1, scratch1, scratch2); // Load infinity into d1.
4670 __ vcmp(d0, d1);
4671 __ vmrs(pc);
4672 runtime.Branch(eq); // d0 reached infinity.
4673 __ vdiv(d0, d2, d0);
4674 __ b(&allocate_return);
4675
4676 __ bind(&exponent_nonsmi);
4677 // Special handling of raising to the power of -0.5 and 0.5. First check
4678 // that the value is a heap number and that the lower bits (which for both
4679 // values are zero).
4680 heap_number_map = r6;
4681 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
4682 __ ldr(scratch1, FieldMemOperand(exponent, HeapObject::kMapOffset));
4683 __ ldr(scratch2, FieldMemOperand(exponent, HeapNumber::kMantissaOffset));
4684 __ cmp(scratch1, heap_number_map);
4685 runtime.Branch(ne);
4686 __ tst(scratch2, scratch2);
4687 runtime.Branch(ne);
4688
4689 // Load the higher bits (which contains the floating point exponent).
4690 __ ldr(scratch1, FieldMemOperand(exponent, HeapNumber::kExponentOffset));
4691
4692 // Compare exponent with -0.5.
4693 __ cmp(scratch1, Operand(0xbfe00000));
4694 __ b(ne, &not_minus_half);
4695
4696 // Get the double value from the base into vfp register d0.
4697 __ ObjectToDoubleVFPRegister(base, d0,
4698 scratch1, scratch2, heap_number_map, s0,
4699 runtime.entry_label(),
4700 AVOID_NANS_AND_INFINITIES);
4701
4702 // Load 1.0 into d2.
Ben Murdoch3bec4d22010-07-22 14:51:16 +01004703 __ vmov(d2, 1.0);
Steve Block8defd9f2010-07-08 12:39:36 +01004704
4705 // Calculate the reciprocal of the square root. 1/sqrt(x) = sqrt(1/x).
4706 __ vdiv(d0, d2, d0);
4707 __ vsqrt(d0, d0);
4708
4709 __ b(&allocate_return);
4710
4711 __ bind(&not_minus_half);
4712 // Compare exponent with 0.5.
4713 __ cmp(scratch1, Operand(0x3fe00000));
4714 runtime.Branch(ne);
4715
4716 // Get the double value from the base into vfp register d0.
4717 __ ObjectToDoubleVFPRegister(base, d0,
4718 scratch1, scratch2, heap_number_map, s0,
4719 runtime.entry_label(),
4720 AVOID_NANS_AND_INFINITIES);
4721 __ vsqrt(d0, d0);
4722
4723 __ bind(&allocate_return);
4724 Register scratch3 = r5;
4725 __ AllocateHeapNumberWithValue(scratch3, d0, scratch1, scratch2,
4726 heap_number_map, runtime.entry_label());
4727 __ mov(base, scratch3);
4728 done.Jump();
4729
4730 runtime.Bind();
4731
4732 // Push back the arguments again for the runtime call.
4733 frame_->EmitPush(base);
4734 frame_->EmitPush(exponent);
4735 frame_->CallRuntime(Runtime::kMath_pow, 2);
4736 __ Move(base, r0);
4737
4738 done.Bind();
4739 frame_->EmitPush(base);
4740 }
Steve Block6ded16b2010-05-10 14:33:55 +01004741}
4742
4743
Steve Block8defd9f2010-07-08 12:39:36 +01004744// Generates the Math.sqrt method.
Steve Block6ded16b2010-05-10 14:33:55 +01004745void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) {
4746 ASSERT(args->length() == 1);
4747 Load(args->at(0));
Steve Block8defd9f2010-07-08 12:39:36 +01004748
4749 if (!CpuFeatures::IsSupported(VFP3)) {
4750 frame_->CallRuntime(Runtime::kMath_sqrt, 1);
4751 frame_->EmitPush(r0);
4752 } else {
4753 CpuFeatures::Scope scope(VFP3);
4754 JumpTarget runtime, done;
4755
4756 Register scratch1 = VirtualFrame::scratch0();
4757 Register scratch2 = VirtualFrame::scratch1();
4758
4759 // Get the value from the frame.
4760 Register tos = frame_->PopToRegister();
4761
4762 // Set the frame for the runtime jump target. The code below jumps to the
4763 // jump target label so the frame needs to be established before that.
4764 ASSERT(runtime.entry_frame() == NULL);
4765 runtime.set_entry_frame(frame_);
4766
4767 Register heap_number_map = r6;
John Reck59135872010-11-02 12:39:01 -07004768 Register new_heap_number = r5;
Steve Block8defd9f2010-07-08 12:39:36 +01004769 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
4770
4771 // Get the double value from the heap number into vfp register d0.
4772 __ ObjectToDoubleVFPRegister(tos, d0,
4773 scratch1, scratch2, heap_number_map, s0,
4774 runtime.entry_label());
4775
4776 // Calculate the square root of d0 and place result in a heap number object.
4777 __ vsqrt(d0, d0);
John Reck59135872010-11-02 12:39:01 -07004778 __ AllocateHeapNumberWithValue(new_heap_number,
4779 d0,
4780 scratch1, scratch2,
4781 heap_number_map,
4782 runtime.entry_label());
4783 __ mov(tos, Operand(new_heap_number));
Steve Block8defd9f2010-07-08 12:39:36 +01004784 done.Jump();
4785
4786 runtime.Bind();
4787 // Push back the argument again for the runtime call.
4788 frame_->EmitPush(tos);
4789 frame_->CallRuntime(Runtime::kMath_sqrt, 1);
4790 __ Move(tos, r0);
4791
4792 done.Bind();
4793 frame_->EmitPush(tos);
4794 }
Steve Block6ded16b2010-05-10 14:33:55 +01004795}
4796
4797
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004798class DeferredStringCharCodeAt : public DeferredCode {
4799 public:
4800 DeferredStringCharCodeAt(Register object,
4801 Register index,
4802 Register scratch,
4803 Register result)
4804 : result_(result),
4805 char_code_at_generator_(object,
4806 index,
4807 scratch,
4808 result,
4809 &need_conversion_,
4810 &need_conversion_,
4811 &index_out_of_range_,
4812 STRING_INDEX_IS_NUMBER) {}
4813
4814 StringCharCodeAtGenerator* fast_case_generator() {
4815 return &char_code_at_generator_;
4816 }
4817
4818 virtual void Generate() {
4819 VirtualFrameRuntimeCallHelper call_helper(frame_state());
4820 char_code_at_generator_.GenerateSlow(masm(), call_helper);
4821
4822 __ bind(&need_conversion_);
4823 // Move the undefined value into the result register, which will
4824 // trigger conversion.
4825 __ LoadRoot(result_, Heap::kUndefinedValueRootIndex);
4826 __ jmp(exit_label());
4827
4828 __ bind(&index_out_of_range_);
4829 // When the index is out of range, the spec requires us to return
4830 // NaN.
4831 __ LoadRoot(result_, Heap::kNanValueRootIndex);
4832 __ jmp(exit_label());
4833 }
4834
4835 private:
4836 Register result_;
4837
4838 Label need_conversion_;
4839 Label index_out_of_range_;
4840
4841 StringCharCodeAtGenerator char_code_at_generator_;
4842};
4843
4844
// This generates code that performs a String.prototype.charCodeAt() call
// or returns a smi in order to trigger conversion.  The fast case is
// emitted inline; slow cases and spec fix-ups (undefined / NaN) are
// handled by DeferredStringCharCodeAt.
void CodeGenerator::GenerateStringCharCodeAt(ZoneList<Expression*>* args) {
  Comment(masm_, "[ GenerateStringCharCodeAt");
  ASSERT(args->length() == 2);

  Load(args->at(0));
  Load(args->at(1));

  // Pop index first (it was pushed last), then the receiver object.
  Register index = frame_->PopToRegister();
  Register object = frame_->PopToRegister(index);

  // We need two extra registers.
  Register scratch = VirtualFrame::scratch0();
  Register result = VirtualFrame::scratch1();

  DeferredStringCharCodeAt* deferred =
      new DeferredStringCharCodeAt(object,
                                   index,
                                   scratch,
                                   result);
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->EmitPush(result);
}
4870
4871
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004872class DeferredStringCharFromCode : public DeferredCode {
4873 public:
4874 DeferredStringCharFromCode(Register code,
4875 Register result)
4876 : char_from_code_generator_(code, result) {}
4877
4878 StringCharFromCodeGenerator* fast_case_generator() {
4879 return &char_from_code_generator_;
4880 }
4881
4882 virtual void Generate() {
4883 VirtualFrameRuntimeCallHelper call_helper(frame_state());
4884 char_from_code_generator_.GenerateSlow(masm(), call_helper);
4885 }
4886
4887 private:
4888 StringCharFromCodeGenerator char_from_code_generator_;
4889};
4890
4891
// Generates code for creating a one-char string from a char code.  The
// fast case is emitted inline; the slow case is handled by
// DeferredStringCharFromCode.
void CodeGenerator::GenerateStringCharFromCode(ZoneList<Expression*>* args) {
  Comment(masm_, "[ GenerateStringCharFromCode");
  ASSERT(args->length() == 1);

  Load(args->at(0));

  // Reserve a result register first, then pop the char code into a
  // different register.
  Register result = frame_->GetTOSRegister();
  Register code = frame_->PopToRegister(result);

  DeferredStringCharFromCode* deferred = new DeferredStringCharFromCode(
      code, result);
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->EmitPush(result);
}
4908
4909
// Deferred (out-of-line) code for the charAt fast path.  The inline
// fast case is emitted by the wrapped StringCharAtGenerator; this class
// supplies the slow path plus two fix-ups: when the receiver or index
// needs conversion the result is set to smi zero (so the caller
// re-dispatches), and when the index is out of range the result is the
// empty string, as the spec requires.
class DeferredStringCharAt : public DeferredCode {
 public:
  DeferredStringCharAt(Register object,
                       Register index,
                       Register scratch1,
                       Register scratch2,
                       Register result)
      : result_(result),
        char_at_generator_(object,
                           index,
                           scratch1,
                           scratch2,
                           result,
                           &need_conversion_,
                           &need_conversion_,
                           &index_out_of_range_,
                           STRING_INDEX_IS_NUMBER) {}

  // Exposes the generator so the caller can emit the fast case inline.
  StringCharAtGenerator* fast_case_generator() {
    return &char_at_generator_;
  }

  virtual void Generate() {
    VirtualFrameRuntimeCallHelper call_helper(frame_state());
    char_at_generator_.GenerateSlow(masm(), call_helper);

    __ bind(&need_conversion_);
    // Move smi zero into the result register, which will trigger
    // conversion.
    __ mov(result_, Operand(Smi::FromInt(0)));
    __ jmp(exit_label());

    __ bind(&index_out_of_range_);
    // When the index is out of range, the spec requires us to return
    // the empty string.
    __ LoadRoot(result_, Heap::kEmptyStringRootIndex);
    __ jmp(exit_label());
  }

 private:
  // Destination register for the character / sentinel values.
  Register result_;

  // Labels bound in Generate() and wired into the generator above.
  Label need_conversion_;
  Label index_out_of_range_;

  StringCharAtGenerator char_at_generator_;
};
4957
4958
// This generates code that performs a String.prototype.charAt() call
// or returns a smi in order to trigger conversion.  The fast case is
// emitted inline; slow cases and spec fix-ups (smi zero / empty string)
// are handled by DeferredStringCharAt.
void CodeGenerator::GenerateStringCharAt(ZoneList<Expression*>* args) {
  Comment(masm_, "[ GenerateStringCharAt");
  ASSERT(args->length() == 2);

  Load(args->at(0));
  Load(args->at(1));

  // Pop index first (it was pushed last), then the receiver object.
  Register index = frame_->PopToRegister();
  Register object = frame_->PopToRegister(index);

  // We need three extra registers.
  Register scratch1 = VirtualFrame::scratch0();
  Register scratch2 = VirtualFrame::scratch1();
  // Use r6 without notifying the virtual frame.
  Register result = r6;

  DeferredStringCharAt* deferred =
      new DeferredStringCharAt(object,
                               index,
                               scratch1,
                               scratch2,
                               result);
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->EmitPush(result);
}
4987
4988
// Inlined %_IsArray(arg): pops the argument and leaves the answer in the
// condition codes (cc_reg_ == eq means the value is a JS array).
void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  JumpTarget answer;
  // We need the CC bits to come out as not_equal in the case where the
  // object is a smi.  This can't be done with the usual test opcode so
  // we use XOR to get the right CC bits.
  Register possible_array = frame_->PopToRegister();
  Register scratch = VirtualFrame::scratch0();
  __ and_(scratch, possible_array, Operand(kSmiTagMask));
  __ eor(scratch, scratch, Operand(kSmiTagMask), SetCC);
  // Smi: CC is ne here, which reads as "false" below.
  answer.Branch(ne);
  // It is a heap object - get the map. Check if the object is a JS array.
  __ CompareObjectType(possible_array, scratch, scratch, JS_ARRAY_TYPE);
  answer.Bind();
  cc_reg_ = eq;
}
5006
5007
// Inlined %_IsRegExp(arg): pops the argument and leaves the answer in the
// condition codes (cc_reg_ == eq means the value is a JSRegExp object).
// Structure mirrors GenerateIsArray above.
void CodeGenerator::GenerateIsRegExp(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  JumpTarget answer;
  // We need the CC bits to come out as not_equal in the case where the
  // object is a smi.  This can't be done with the usual test opcode so
  // we use XOR to get the right CC bits.
  Register possible_regexp = frame_->PopToRegister();
  Register scratch = VirtualFrame::scratch0();
  __ and_(scratch, possible_regexp, Operand(kSmiTagMask));
  __ eor(scratch, scratch, Operand(kSmiTagMask), SetCC);
  // Smi: CC is ne here, which reads as "false" below.
  answer.Branch(ne);
  // It is a heap object - get the map. Check if the object is a regexp.
  __ CompareObjectType(possible_regexp, scratch, scratch, JS_REGEXP_TYPE);
  answer.Bind();
  cc_reg_ = eq;
}
5025
5026
// Inlined %_IsObject(arg): result is delivered through the condition codes
// and the true/false branch targets rather than as a pushed value.
void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
  // This generates a fast version of:
  // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp')
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Register possible_object = frame_->PopToRegister();
  // Smis are not objects.
  __ tst(possible_object, Operand(kSmiTagMask));
  false_target()->Branch(eq);

  // null counts as an object (typeof null === 'object').
  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(possible_object, ip);
  true_target()->Branch(eq);

  Register map_reg = VirtualFrame::scratch0();
  __ ldr(map_reg, FieldMemOperand(possible_object, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ ldrb(possible_object, FieldMemOperand(map_reg, Map::kBitFieldOffset));
  __ tst(possible_object, Operand(1 << Map::kIsUndetectable));
  false_target()->Branch(ne);

  // Finally check the instance type is in the JS object range.
  __ ldrb(possible_object, FieldMemOperand(map_reg, Map::kInstanceTypeOffset));
  __ cmp(possible_object, Operand(FIRST_JS_OBJECT_TYPE));
  false_target()->Branch(lt);
  __ cmp(possible_object, Operand(LAST_JS_OBJECT_TYPE));
  cc_reg_ = le;
}
5053
5054
// Inlined %_IsSpecObject(arg): pops the argument and leaves the answer in
// the condition codes (cc_reg_ == ge means "is a spec object").
void CodeGenerator::GenerateIsSpecObject(ZoneList<Expression*>* args) {
  // This generates a fast version of:
  // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp' ||
  // typeof(arg) == function).
  // It includes undetectable objects (as opposed to IsObject).
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Register value = frame_->PopToRegister();
  // Smis fail immediately.
  __ tst(value, Operand(kSmiTagMask));
  false_target()->Branch(eq);
  // Check that this is an object.
  // Any instance type >= FIRST_JS_OBJECT_TYPE qualifies; note the argument
  // register is clobbered by the map/type loads below.
  __ ldr(value, FieldMemOperand(value, HeapObject::kMapOffset));
  __ ldrb(value, FieldMemOperand(value, Map::kInstanceTypeOffset));
  __ cmp(value, Operand(FIRST_JS_OBJECT_TYPE));
  cc_reg_ = ge;
}
5071
5072
// Deferred code to check whether the String JavaScript object is safe for
// using its default valueOf.  This code is called after the bit caching this
// information in the map has been checked with the map for the object in the
// map_result_ register.  On return the register map_result_ contains 1 for
// true and 0 for false.
class DeferredIsStringWrapperSafeForDefaultValueOf : public DeferredCode {
 public:
  // object_ holds the wrapper being tested and must still contain the object
  // whose map is in map_result_ when Generate() runs (verified in debug code).
  DeferredIsStringWrapperSafeForDefaultValueOf(Register object,
                                               Register map_result,
                                               Register scratch1,
                                               Register scratch2)
      : object_(object),
        map_result_(map_result),
        scratch1_(scratch1),
        scratch2_(scratch2) { }

  virtual void Generate() {
    Label false_result;

    // Check that map is loaded as expected.
    if (FLAG_debug_code) {
      __ ldr(ip, FieldMemOperand(object_, HeapObject::kMapOffset));
      __ cmp(map_result_, ip);
      __ Assert(eq, "Map not in expected register");
    }

    // Check for fast case object. Generate false result for slow case object.
    // (Slow case == properties stored in a hash table.)
    __ ldr(scratch1_, FieldMemOperand(object_, JSObject::kPropertiesOffset));
    __ ldr(scratch1_, FieldMemOperand(scratch1_, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
    __ cmp(scratch1_, ip);
    __ b(eq, &false_result);

    // Look for valueOf symbol in the descriptor array, and indicate false if
    // found. The type is not checked, so if it is a transition it is a false
    // negative.
    __ ldr(map_result_,
           FieldMemOperand(map_result_, Map::kInstanceDescriptorsOffset));
    __ ldr(scratch2_, FieldMemOperand(map_result_, FixedArray::kLengthOffset));
    // map_result_: descriptor array
    // scratch2_: length of descriptor array (a smi)
    // Calculate the end of the descriptor array.
    STATIC_ASSERT(kSmiTag == 0);
    STATIC_ASSERT(kSmiTagSize == 1);
    STATIC_ASSERT(kPointerSize == 4);
    __ add(scratch1_,
           map_result_,
           Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ add(scratch1_,
           scratch1_,
           Operand(scratch2_, LSL, kPointerSizeLog2 - kSmiTagSize));

    // Calculate location of the first key name.
    __ add(map_result_,
           map_result_,
           Operand(FixedArray::kHeaderSize - kHeapObjectTag +
                   DescriptorArray::kFirstIndex * kPointerSize));
    // Loop through all the keys in the descriptor array. If one of these is
    // the symbol valueOf the result is false.
    Label entry, loop;
    // The use of ip to store the valueOf symbol assumes that it is not
    // otherwise used in the loop below.
    __ mov(ip, Operand(Factory::value_of_symbol()));
    __ jmp(&entry);
    __ bind(&loop);
    __ ldr(scratch2_, MemOperand(map_result_, 0));
    __ cmp(scratch2_, ip);
    __ b(eq, &false_result);
    __ add(map_result_, map_result_, Operand(kPointerSize));
    __ bind(&entry);
    // scratch1_ still holds the end-of-array address computed above.
    __ cmp(map_result_, Operand(scratch1_));
    __ b(ne, &loop);

    // Reload map as register map_result_ was used as temporary above.
    __ ldr(map_result_, FieldMemOperand(object_, HeapObject::kMapOffset));

    // If a valueOf property is not found on the object check that its
    // prototype is the un-modified String prototype. If not result is false.
    __ ldr(scratch1_, FieldMemOperand(map_result_, Map::kPrototypeOffset));
    __ tst(scratch1_, Operand(kSmiTagMask));
    __ b(eq, &false_result);
    // Compare the prototype's map against the string function's prototype
    // map stashed in the global context.
    __ ldr(scratch1_, FieldMemOperand(scratch1_, HeapObject::kMapOffset));
    __ ldr(scratch2_,
           ContextOperand(cp, Context::GLOBAL_INDEX));
    __ ldr(scratch2_,
           FieldMemOperand(scratch2_, GlobalObject::kGlobalContextOffset));
    __ ldr(scratch2_,
           ContextOperand(
               scratch2_, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
    __ cmp(scratch1_, scratch2_);
    __ b(ne, &false_result);

    // Set the bit in the map to indicate that it has been checked safe for
    // default valueOf and set true result.
    __ ldr(scratch1_, FieldMemOperand(map_result_, Map::kBitField2Offset));
    __ orr(scratch1_,
           scratch1_,
           Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
    __ str(scratch1_, FieldMemOperand(map_result_, Map::kBitField2Offset));
    __ mov(map_result_, Operand(1));
    __ jmp(exit_label());
    __ bind(&false_result);
    // Set false result.  Falls through to the exit label.
    __ mov(map_result_, Operand(0, RelocInfo::NONE));
  }

 private:
  Register object_;      // The string wrapper object being tested.
  Register map_result_;  // In: the object's map.  Out: 1 (true) or 0 (false).
  Register scratch1_;
  Register scratch2_;
};
5185
5186
// Inlined %_IsStringWrapperSafeForDefaultValueOf(arg): fast path checks the
// bit cached in the map's bit field 2; the full check runs in the deferred
// code above, which also sets the cache bit.  Result is in the condition
// codes (cc_reg_ == ne means true).
void CodeGenerator::GenerateIsStringWrapperSafeForDefaultValueOf(
    ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Register obj = frame_->PopToRegister();  // Pop the string wrapper.
  if (FLAG_debug_code) {
    __ AbortIfSmi(obj);
  }

  // Check whether this map has already been checked to be safe for default
  // valueOf.
  Register map_result = VirtualFrame::scratch0();
  __ ldr(map_result, FieldMemOperand(obj, HeapObject::kMapOffset));
  __ ldrb(ip, FieldMemOperand(map_result, Map::kBitField2Offset));
  __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
  true_target()->Branch(ne);

  // We need an additional two scratch registers for the deferred code.
  Register scratch1 = VirtualFrame::scratch1();
  // Use r6 without notifying the virtual frame.
  Register scratch2 = r6;

  // Cache bit not set (eq): fall into the deferred slow-path check, which
  // leaves 1 or 0 in map_result.
  DeferredIsStringWrapperSafeForDefaultValueOf* deferred =
      new DeferredIsStringWrapperSafeForDefaultValueOf(
          obj, map_result, scratch1, scratch2);
  deferred->Branch(eq);
  deferred->BindExit();
  __ tst(map_result, Operand(map_result));
  cc_reg_ = ne;
}
5217
5218
// Inlined %_IsFunction(arg): pops the argument and leaves the answer in the
// condition codes (cc_reg_ == eq means the value is a JSFunction).
void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) {
  // This generates a fast version of:
  // (%_ClassOf(arg) === 'Function')
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Register possible_function = frame_->PopToRegister();
  // Smis are not functions.
  __ tst(possible_function, Operand(kSmiTagMask));
  false_target()->Branch(eq);
  Register map_reg = VirtualFrame::scratch0();
  Register scratch = VirtualFrame::scratch1();
  __ CompareObjectType(possible_function, map_reg, scratch, JS_FUNCTION_TYPE);
  cc_reg_ = eq;
}
5232
5233
// Inlined %_IsUndetectableObject(arg): pops the argument and leaves the
// answer in the condition codes (cc_reg_ == ne means the object's map has
// the undetectable bit set).
void CodeGenerator::GenerateIsUndetectableObject(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Register possible_undetectable = frame_->PopToRegister();
  // Smis are never undetectable.
  __ tst(possible_undetectable, Operand(kSmiTagMask));
  false_target()->Branch(eq);
  Register scratch = VirtualFrame::scratch0();
  // Test the undetectable bit in the map's bit field.
  __ ldr(scratch,
         FieldMemOperand(possible_undetectable, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
  __ tst(scratch, Operand(1 << Map::kIsUndetectable));
  cc_reg_ = ne;
}
5247
5248
// Inlined %_IsConstructCall(): answers whether the current function was
// invoked with 'new' by inspecting the caller's frame marker.  Result is in
// the condition codes (cc_reg_ == eq means construct call).  Takes no
// arguments and touches only the two virtual-frame scratch registers.
void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 0);

  Register scratch0 = VirtualFrame::scratch0();
  Register scratch1 = VirtualFrame::scratch1();
  // Get the frame pointer for the calling frame.
  __ ldr(scratch0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  // The load of the caller's caller FP is conditional (eq) on the adaptor
  // sentinel comparison above it.
  __ ldr(scratch1,
         MemOperand(scratch0, StandardFrameConstants::kContextOffset));
  __ cmp(scratch1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ ldr(scratch0,
         MemOperand(scratch0, StandardFrameConstants::kCallerFPOffset), eq);

  // Check the marker in the calling frame.
  __ ldr(scratch1,
         MemOperand(scratch0, StandardFrameConstants::kMarkerOffset));
  __ cmp(scratch1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
  cc_reg_ = eq;
}
5270
5271
// Inlined %_ArgumentsLength(): pushes the number of actual arguments as a
// smi.  Uses conditionally executed instructions instead of branches: the
// 'ne' mov supplies the formal parameter count, the 'eq' ldr overrides it
// with the adaptor frame's length when the caller went through an arguments
// adaptor.
void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 0);

  Register tos = frame_->GetTOSRegister();
  Register scratch0 = VirtualFrame::scratch0();
  Register scratch1 = VirtualFrame::scratch1();

  // Check if the calling frame is an arguments adaptor frame.
  __ ldr(scratch0,
         MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(scratch1,
         MemOperand(scratch0, StandardFrameConstants::kContextOffset));
  __ cmp(scratch1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Get the number of formal parameters.  Executed only when there is no
  // adaptor frame (ne); LeaveCC preserves the comparison result for the
  // conditional load below.
  __ mov(tos, Operand(Smi::FromInt(scope()->num_parameters())), LeaveCC, ne);

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ ldr(tos,
         MemOperand(scratch0, ArgumentsAdaptorFrameConstants::kLengthOffset),
         eq);

  frame_->EmitPush(tos);
}
5297
5298
// Inlined %_Arguments(key): loads arguments[key] via the shared
// ArgumentsAccessStub and pushes the result.
void CodeGenerator::GenerateArguments(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);

  // Satisfy contract with ArgumentsAccessStub:
  // Load the key into r1 and the formal parameters count into r0.
  Load(args->at(0));
  frame_->PopToR1();
  frame_->SpillAll();
  __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));

  // Call the shared stub to get to arguments[key].
  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
  frame_->CallStub(&stub, 0);
  frame_->EmitPush(r0);
}
5314
5315
// Inlined %_RandomHeapNumber(): allocates a heap number, fills it with a
// random value in [0, 1), and pushes it.  With VFP3 the mantissa trick below
// is done inline; otherwise a C helper fills the number.
void CodeGenerator::GenerateRandomHeapNumber(
    ZoneList<Expression*>* args) {
  VirtualFrame::SpilledScope spilled_scope(frame_);
  ASSERT(args->length() == 0);

  Label slow_allocate_heapnumber;
  Label heapnumber_allocated;

  // Fast path: allocate the heap number in new space (result in r4).
  __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
  __ AllocateHeapNumber(r4, r1, r2, r6, &slow_allocate_heapnumber);
  __ jmp(&heapnumber_allocated);

  __ bind(&slow_allocate_heapnumber);
  // Allocate a heap number.
  __ CallRuntime(Runtime::kNumberAlloc, 0);
  __ mov(r4, Operand(r0));

  __ bind(&heapnumber_allocated);

  // Convert 32 random bits in r0 to 0.(32 random bits) in a double
  // by computing:
  // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
  if (CpuFeatures::IsSupported(VFP3)) {
    __ PrepareCallCFunction(0, r1);
    __ CallCFunction(ExternalReference::random_uint32_function(), 0);

    CpuFeatures::Scope scope(VFP3);
    // 0x41300000 is the top half of 1.0 x 2^20 as a double.
    // Create this constant using mov/orr to avoid PC relative load.
    __ mov(r1, Operand(0x41000000));
    __ orr(r1, r1, Operand(0x300000));
    // Move 0x41300000xxxxxxxx (x = random bits) to VFP.
    __ vmov(d7, r0, r1);
    // Move 0x4130000000000000 to VFP.
    __ mov(r0, Operand(0, RelocInfo::NONE));
    __ vmov(d8, r0, r1);
    // Subtract and store the result in the heap number.
    __ vsub(d7, d7, d8);
    __ sub(r0, r4, Operand(kHeapObjectTag));
    __ vstr(d7, r0, HeapNumber::kValueOffset);
    frame_->EmitPush(r4);
  } else {
    // No VFP3: let the C helper write the random double into the number.
    __ mov(r0, Operand(r4));
    __ PrepareCallCFunction(1, r1);
    __ CallCFunction(
        ExternalReference::fill_heap_number_with_random_function(), 1);
    frame_->EmitPush(r0);
  }
}
5365
5366
// Inlined %_StringAdd(a, b): delegates to StringAddStub and pushes the
// concatenated string (stub result in r0).
void CodeGenerator::GenerateStringAdd(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  Load(args->at(0));
  Load(args->at(1));

  StringAddStub stub(NO_STRING_ADD_FLAGS);
  // Stub calls clobber registers, so spill the whole virtual frame first.
  frame_->SpillAll();
  frame_->CallStub(&stub, 2);
  frame_->EmitPush(r0);
}
5378
5379
// Inlined %_SubString(string, from, to): delegates to SubStringStub and
// pushes the resulting string (stub result in r0).
void CodeGenerator::GenerateSubString(ZoneList<Expression*>* args) {
  ASSERT_EQ(3, args->length());

  Load(args->at(0));
  Load(args->at(1));
  Load(args->at(2));

  SubStringStub stub;
  // Stub calls clobber registers, so spill the whole virtual frame first.
  frame_->SpillAll();
  frame_->CallStub(&stub, 3);
  frame_->EmitPush(r0);
}
5392
5393
// Inlined %_StringCompare(a, b): delegates to StringCompareStub and pushes
// the comparison result (stub result in r0).
void CodeGenerator::GenerateStringCompare(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  Load(args->at(0));
  Load(args->at(1));

  StringCompareStub stub;
  // Stub calls clobber registers, so spill the whole virtual frame first.
  frame_->SpillAll();
  frame_->CallStub(&stub, 2);
  frame_->EmitPush(r0);
}
5405
5406
// Inlined %_RegExpExec(regexp, subject, index, lastMatchInfo): delegates to
// RegExpExecStub and pushes the result (stub result in r0).
void CodeGenerator::GenerateRegExpExec(ZoneList<Expression*>* args) {
  ASSERT_EQ(4, args->length());

  Load(args->at(0));
  Load(args->at(1));
  Load(args->at(2));
  Load(args->at(3));
  RegExpExecStub stub;
  // Stub calls clobber registers, so spill the whole virtual frame first.
  frame_->SpillAll();
  frame_->CallStub(&stub, 4);
  frame_->EmitPush(r0);
}
Leon Clarkee46be812010-01-19 14:06:41 +00005419
Steve Block6ded16b2010-05-10 14:33:55 +01005420
// Inlined %_RegExpConstructResult(size, index, input): builds the RegExp
// match result array via RegExpConstructResultStub and pushes it.
void CodeGenerator::GenerateRegExpConstructResult(ZoneList<Expression*>* args) {
  ASSERT_EQ(3, args->length());

  Load(args->at(0));  // Size of array, smi.
  Load(args->at(1));  // "index" property value.
  Load(args->at(2));  // "input" property value.
  RegExpConstructResultStub stub;
  // Stub calls clobber registers, so spill the whole virtual frame first.
  frame_->SpillAll();
  frame_->CallStub(&stub, 3);
  frame_->EmitPush(r0);
}
5432
5433
// Slow path for GenerateGetFromCache: when the inline finger probe misses,
// fall back to the %GetFromCache runtime function.
class DeferredSearchCache: public DeferredCode {
 public:
  // dst: receives the cached value; cache: the JSFunctionResultCache fixed
  // array; key: the lookup key.
  DeferredSearchCache(Register dst, Register cache, Register key)
      : dst_(dst), cache_(cache), key_(key) {
    set_comment("[ DeferredSearchCache");
  }

  virtual void Generate();

 private:
  Register dst_, cache_, key_;
};
5446
5447
// Calls %GetFromCache(cache, key) and moves the runtime result (r0) into
// the destination register expected by the fast path.
void DeferredSearchCache::Generate() {
  __ Push(cache_, key_);
  __ CallRuntime(Runtime::kGetFromCache, 2);
  __ Move(dst_, r0);
}
5453
5454
// Inlined %_GetFromCache(cache_id, key): probes the JSFunctionResultCache's
// "finger" entry inline; on a miss, DeferredSearchCache calls the runtime.
// The cache id must be a compile-time literal (asserted below).
void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      Top::global_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    // Unknown cache: abort in debug code and produce undefined.
    __ Abort("Attempt to use undefined cache.");
    frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
    return;
  }

  Load(args->at(1));

  frame_->PopToR1();
  frame_->SpillAll();
  Register key = r1;  // Just popped to r1.
  Register result = r0;  // Free, as frame has just been spilled.
  Register scratch1 = VirtualFrame::scratch0();
  Register scratch2 = VirtualFrame::scratch1();

  // Fetch the cache fixed array from the global context:
  // global object -> global context -> caches array -> cache[cache_id].
  __ ldr(scratch1, ContextOperand(cp, Context::GLOBAL_INDEX));
  __ ldr(scratch1,
         FieldMemOperand(scratch1, GlobalObject::kGlobalContextOffset));
  __ ldr(scratch1,
         ContextOperand(scratch1, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ ldr(scratch1,
         FieldMemOperand(scratch1, FixedArray::OffsetOfElementAt(cache_id)));

  DeferredSearchCache* deferred =
      new DeferredSearchCache(result, scratch1, key);

  const int kFingerOffset =
      FixedArray::OffsetOfElementAt(JSFunctionResultCache::kFingerIndex);
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ ldr(result, FieldMemOperand(scratch1, kFingerOffset));
  // result now holds finger offset as a smi.
  __ add(scratch2, scratch1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // scratch2 now points to the start of fixed array elements.
  __ ldr(result,
         MemOperand(
             scratch2, result, LSL, kPointerSizeLog2 - kSmiTagSize, PreIndex));
  // Note side effect of PreIndex: scratch2 now points to the key of the pair.
  __ cmp(key, result);
  deferred->Branch(ne);

  // Hit: the value immediately follows the key in the (key, value) pair.
  __ ldr(result, MemOperand(scratch2, kPointerSize));

  deferred->BindExit();
  frame_->EmitPush(result);
}
5508
5509
// Inlined %_NumberToString(number): delegates to NumberToStringStub and
// pushes the resulting string (stub result in r0).
void CodeGenerator::GenerateNumberToString(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);

  // Load the argument and call the stub (which falls back to the runtime
  // itself when needed).
  Load(args->at(0));

  NumberToStringStub stub;
  frame_->SpillAll();
  frame_->CallStub(&stub, 1);
  frame_->EmitPush(r0);
}
5521
5522
// Slow path for GenerateSwapElements: when the inline fast-case checks fail,
// fall back to the %SwapElements runtime function.
class DeferredSwapElements: public DeferredCode {
 public:
  DeferredSwapElements(Register object, Register index1, Register index2)
      : object_(object), index1_(index1), index2_(index2) {
    set_comment("[ DeferredSwapElements");
  }

  virtual void Generate();

 private:
  Register object_, index1_, index2_;
};
5535
5536
// Calls %SwapElements(object, index1, index2).  The runtime result (r0,
// undefined) is not consumed here; the fast path pushes undefined itself
// after BindExit.
void DeferredSwapElements::Generate() {
  __ push(object_);
  __ push(index1_);
  __ push(index2_);
  __ CallRuntime(Runtime::kSwapElements, 3);
}
5543
5544
// Inlined %_SwapElements(object, index1, index2): swaps two elements of a
// fast-case JSArray in place, with write-barrier bookkeeping; bails out to
// DeferredSwapElements (the runtime) for anything non-trivial.  Pushes
// undefined as the intrinsic's result.
void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) {
  Comment cmnt(masm_, "[ GenerateSwapElements");

  ASSERT_EQ(3, args->length());

  Load(args->at(0));
  Load(args->at(1));
  Load(args->at(2));

  VirtualFrame::SpilledScope spilled_scope(frame_);

  Register index2 = r2;
  Register index1 = r1;
  Register object = r0;
  Register tmp1 = r3;
  Register tmp2 = r4;

  frame_->EmitPop(index2);
  frame_->EmitPop(index1);
  frame_->EmitPop(object);

  DeferredSwapElements* deferred =
      new DeferredSwapElements(object, index1, index2);

  // Fetch the map and check if array is in fast case.
  // Check that object doesn't require security checks and
  // has no indexed interceptor.
  __ CompareObjectType(object, tmp1, tmp2, FIRST_JS_OBJECT_TYPE);
  deferred->Branch(lt);
  __ ldrb(tmp2, FieldMemOperand(tmp1, Map::kBitFieldOffset));
  __ tst(tmp2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask));
  deferred->Branch(nz);

  // Check the object's elements are in fast case and writable.
  __ ldr(tmp1, FieldMemOperand(object, JSObject::kElementsOffset));
  __ ldr(tmp2, FieldMemOperand(tmp1, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
  __ cmp(tmp2, ip);
  deferred->Branch(ne);

  // Smi-tagging is equivalent to multiplying by 2.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);

  // Check that both indices are smis.
  __ mov(tmp2, index1);
  __ orr(tmp2, tmp2, index2);
  __ tst(tmp2, Operand(kSmiTagMask));
  deferred->Branch(nz);

  // Check that both indices are valid.
  // The second cmp executes only if the first one set 'hi' (length > index1),
  // so 'ls' below means "either index is out of range".
  __ ldr(tmp2, FieldMemOperand(object, JSArray::kLengthOffset));
  __ cmp(tmp2, index1);
  __ cmp(tmp2, index2, hi);
  deferred->Branch(ls);

  // Bring the offsets into the fixed array in tmp1 into index1 and
  // index2.
  __ mov(tmp2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(index1, tmp2, Operand(index1, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ add(index2, tmp2, Operand(index2, LSL, kPointerSizeLog2 - kSmiTagSize));

  // Swap elements.
  Register tmp3 = object;
  object = no_reg;
  __ ldr(tmp3, MemOperand(tmp1, index1));
  __ ldr(tmp2, MemOperand(tmp1, index2));
  __ str(tmp3, MemOperand(tmp1, index2));
  __ str(tmp2, MemOperand(tmp1, index1));

  // Skip the write barriers when the elements array is in new space.
  Label done;
  __ InNewSpace(tmp1, tmp2, eq, &done);
  // Possible optimization: do a check that both values are Smis
  // (or them and test against Smi mask.)

  __ mov(tmp2, tmp1);
  RecordWriteStub recordWrite1(tmp1, index1, tmp3);
  __ CallStub(&recordWrite1);

  RecordWriteStub recordWrite2(tmp2, index2, tmp3);
  __ CallStub(&recordWrite2);

  __ bind(&done);

  deferred->BindExit();
  // The intrinsic's value is undefined in both the fast and slow case.
  __ LoadRoot(tmp1, Heap::kUndefinedValueRootIndex);
  frame_->EmitPush(tmp1);
}
5633
5634
// Inlined %_CallFunction(receiver, arg1..argN, function): pushes receiver,
// the arguments and the function, performs the call, and pushes the result.
void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) {
  Comment cmnt(masm_, "[ GenerateCallFunction");

  ASSERT(args->length() >= 2);

  int n_args = args->length() - 2;  // for receiver and function.
  Load(args->at(0));  // receiver
  for (int i = 0; i < n_args; i++) {
    Load(args->at(i + 1));
  }
  Load(args->at(n_args + 1));  // function
  frame_->CallJSFunction(n_args);
  frame_->EmitPush(r0);
}
5649
5650
// Inlined %_MathSin(x): uses the VFP3-based transcendental cache stub when
// available, otherwise calls the runtime.  Pushes the result (in r0).
void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);
  Load(args->at(0));
  if (CpuFeatures::IsSupported(VFP3)) {
    TranscendentalCacheStub stub(TranscendentalCache::SIN);
    // The stub expects its argument both on the stack and in r0.
    frame_->SpillAllButCopyTOSToR0();
    frame_->CallStub(&stub, 1);
  } else {
    frame_->CallRuntime(Runtime::kMath_sin, 1);
  }
  frame_->EmitPush(r0);
}
5663
5664
// Inlined %_MathCos(x): same structure as GenerateMathSin with the COS
// transcendental cache entry.
void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);
  Load(args->at(0));
  if (CpuFeatures::IsSupported(VFP3)) {
    TranscendentalCacheStub stub(TranscendentalCache::COS);
    // The stub expects its argument both on the stack and in r0.
    frame_->SpillAllButCopyTOSToR0();
    frame_->CallStub(&stub, 1);
  } else {
    frame_->CallRuntime(Runtime::kMath_cos, 1);
  }
  frame_->EmitPush(r0);
}
5677
5678
// Inlined %_MathLog(x): same structure as GenerateMathSin with the LOG
// transcendental cache entry.
void CodeGenerator::GenerateMathLog(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);
  Load(args->at(0));
  if (CpuFeatures::IsSupported(VFP3)) {
    TranscendentalCacheStub stub(TranscendentalCache::LOG);
    // The stub expects its argument both on the stack and in r0.
    frame_->SpillAllButCopyTOSToR0();
    frame_->CallStub(&stub, 1);
  } else {
    frame_->CallRuntime(Runtime::kMath_log, 1);
  }
  frame_->EmitPush(r0);
}
5691
5692
// Inlined %_ObjectEquals(a, b): raw pointer identity comparison; the answer
// is left in the condition codes (cc_reg_ == eq means identical values).
void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  Load(args->at(0));
  Load(args->at(1));
  Register lhs = frame_->PopToRegister();
  Register rhs = frame_->PopToRegister(lhs);
  __ cmp(lhs, rhs);
  cc_reg_ = eq;
}
5704
5705
// Inlined %_IsRegExpEquivalent(a, b): true when the two values are the same
// object, or are both JSRegExps with the same map sharing the same data
// array.  Result is in the condition codes (cc_reg_ == eq means true).
void CodeGenerator::GenerateIsRegExpEquivalent(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  Load(args->at(0));
  Load(args->at(1));
  Register right = frame_->PopToRegister();
  Register left = frame_->PopToRegister(right);
  Register tmp = frame_->scratch0();
  Register tmp2 = frame_->scratch1();

  // Jumps to done must have the eq flag set if the test is successful
  // and clear if the test has failed.
  Label done;

  // Identical values are trivially equivalent (eq is set on this branch).
  __ cmp(left, Operand(right));
  __ b(eq, &done);
  // Fail if either is a non-HeapObject: AND-ing the two tagged words keeps
  // the smi tag bit clear if either value is a smi; the eor/tst pair then
  // produces ne for that case.
  __ and_(tmp, left, Operand(right));
  __ eor(tmp, tmp, Operand(kSmiTagMask));
  __ tst(tmp, Operand(kSmiTagMask));
  __ b(ne, &done);
  // Left must be a JSRegExp.
  __ ldr(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
  __ ldrb(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
  __ cmp(tmp2, Operand(JS_REGEXP_TYPE));
  __ b(ne, &done);
  // Both must have the same map (which makes right a JSRegExp too).
  __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
  __ cmp(tmp, Operand(tmp2));
  __ b(ne, &done);
  // Finally, equivalent regexps share the same data array.
  __ ldr(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
  __ ldr(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
  __ cmp(tmp, tmp2);
  __ bind(&done);
  cc_reg_ = eq;
}
5741
5742
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005743void CodeGenerator::GenerateHasCachedArrayIndex(ZoneList<Expression*>* args) {
5744 ASSERT(args->length() == 1);
5745 Load(args->at(0));
5746 Register value = frame_->PopToRegister();
5747 Register tmp = frame_->scratch0();
5748 __ ldr(tmp, FieldMemOperand(value, String::kHashFieldOffset));
5749 __ tst(tmp, Operand(String::kContainsCachedArrayIndexMask));
5750 cc_reg_ = eq;
5751}
5752
5753
5754void CodeGenerator::GenerateGetCachedArrayIndex(ZoneList<Expression*>* args) {
5755 ASSERT(args->length() == 1);
5756 Load(args->at(0));
5757 Register value = frame_->PopToRegister();
5758
5759 __ ldr(value, FieldMemOperand(value, String::kHashFieldOffset));
5760 __ IndexFromHash(value, value);
5761 frame_->EmitPush(value);
5762}
5763
Ben Murdochbb769b22010-08-11 14:56:33 +01005764
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08005765void CodeGenerator::GenerateFastAsciiArrayJoin(ZoneList<Expression*>* args) {
5766 ASSERT(args->length() == 2);
5767 Load(args->at(0));
5768 Register value = frame_->PopToRegister();
5769 __ LoadRoot(value, Heap::kUndefinedValueRootIndex);
5770 frame_->EmitPush(value);
5771}
5772
5773
Steve Blocka7e24c12009-10-30 11:49:00 +00005774void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
5775#ifdef DEBUG
5776 int original_height = frame_->height();
5777#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00005778 if (CheckForInlineRuntimeCall(node)) {
5779 ASSERT((has_cc() && frame_->height() == original_height) ||
5780 (!has_cc() && frame_->height() == original_height + 1));
5781 return;
5782 }
5783
5784 ZoneList<Expression*>* args = node->arguments();
5785 Comment cmnt(masm_, "[ CallRuntime");
5786 Runtime::Function* function = node->function();
5787
5788 if (function == NULL) {
5789 // Prepare stack for calling JS runtime function.
Steve Blocka7e24c12009-10-30 11:49:00 +00005790 // Push the builtins object found in the current global object.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005791 Register scratch = VirtualFrame::scratch0();
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08005792 __ ldr(scratch, GlobalObjectOperand());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005793 Register builtins = frame_->GetTOSRegister();
5794 __ ldr(builtins, FieldMemOperand(scratch, GlobalObject::kBuiltinsOffset));
5795 frame_->EmitPush(builtins);
Steve Blocka7e24c12009-10-30 11:49:00 +00005796 }
5797
5798 // Push the arguments ("left-to-right").
5799 int arg_count = args->length();
5800 for (int i = 0; i < arg_count; i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005801 Load(args->at(i));
Steve Blocka7e24c12009-10-30 11:49:00 +00005802 }
5803
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005804 VirtualFrame::SpilledScope spilled_scope(frame_);
5805
Steve Blocka7e24c12009-10-30 11:49:00 +00005806 if (function == NULL) {
5807 // Call the JS runtime function.
Andrei Popescu402d9372010-02-26 13:31:12 +00005808 __ mov(r2, Operand(node->name()));
Steve Blocka7e24c12009-10-30 11:49:00 +00005809 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08005810 Handle<Code> stub = StubCache::ComputeCallInitialize(arg_count, in_loop);
Steve Blocka7e24c12009-10-30 11:49:00 +00005811 frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
5812 __ ldr(cp, frame_->Context());
Steve Blocka7e24c12009-10-30 11:49:00 +00005813 frame_->EmitPush(r0);
5814 } else {
5815 // Call the C runtime function.
5816 frame_->CallRuntime(function, arg_count);
5817 frame_->EmitPush(r0);
5818 }
Steve Block6ded16b2010-05-10 14:33:55 +01005819 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00005820}
5821
5822
// Emits code for unary operators.  NOT compiles to control flow
// (possibly just negating the pending condition); DELETE and TYPEOF go
// through builtins/the runtime; SUB and BIT_NOT use GenericUnaryOpStub
// with an inlined smi fast path for BIT_NOT; VOID and ADD are handled
// inline.  On exit either a condition is pending (frame height
// unchanged) or one result value has been pushed.
void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ UnaryOperation");

  Token::Value op = node->op();

  if (op == Token::NOT) {
    // Compile the operand to control flow with swapped targets.
    LoadCondition(node->expression(), false_target(), true_target(), true);
    // LoadCondition may (and usually does) leave a test and branch to
    // be emitted by the caller. In that case, negate the condition.
    if (has_cc()) cc_reg_ = NegateCondition(cc_reg_);

  } else if (op == Token::DELETE) {
    Property* property = node->expression()->AsProperty();
    Variable* variable = node->expression()->AsVariableProxy()->AsVariable();
    if (property != NULL) {
      // delete obj.key / obj[key]: the DELETE builtin takes the object
      // and the key.
      Load(property->obj());
      Load(property->key());
      frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, 2);
      frame_->EmitPush(r0);

    } else if (variable != NULL) {
      Slot* slot = variable->AsSlot();
      if (variable->is_global()) {
        // Deleting a global property: receiver is the global object.
        LoadGlobal();
        frame_->EmitPush(Operand(variable->name()));
        frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, 2);
        frame_->EmitPush(r0);

      } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
        // lookup the context holding the named variable
        frame_->EmitPush(cp);
        frame_->EmitPush(Operand(variable->name()));
        frame_->CallRuntime(Runtime::kLookupContext, 2);
        // r0: context
        frame_->EmitPush(r0);
        frame_->EmitPush(Operand(variable->name()));
        frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, 2);
        frame_->EmitPush(r0);

      } else {
        // Default: Result of deleting non-global, not dynamically
        // introduced variables is false.
        frame_->EmitPushRoot(Heap::kFalseValueRootIndex);
      }

    } else {
      // Default: Result of deleting expressions is true.
      Load(node->expression());  // may have side-effects
      frame_->Drop();
      frame_->EmitPushRoot(Heap::kTrueValueRootIndex);
    }

  } else if (op == Token::TYPEOF) {
    // Special case for loading the typeof expression; see comment on
    // LoadTypeofExpression().
    LoadTypeofExpression(node->expression());
    frame_->CallRuntime(Runtime::kTypeof, 1);
    frame_->EmitPush(r0);  // r0 has result

  } else {
    // Arithmetic unary ops.  The overwrite mode tells the stub whether
    // the operand object may be mutated in place to hold the result.
    bool can_overwrite = node->expression()->ResultOverwriteAllowed();
    UnaryOverwriteMode overwrite =
        can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;

    bool no_negative_zero = node->expression()->no_negative_zero();
    Load(node->expression());
    switch (op) {
      case Token::NOT:
      case Token::DELETE:
      case Token::TYPEOF:
        UNREACHABLE();  // handled above
        break;

      case Token::SUB: {
        frame_->PopToR0();
        GenericUnaryOpStub stub(
            Token::SUB,
            overwrite,
            NO_UNARY_FLAGS,
            no_negative_zero ? kIgnoreNegativeZero : kStrictNegativeZero);
        frame_->CallStub(&stub, 0);
        frame_->EmitPush(r0);  // r0 has result
        break;
      }

      case Token::BIT_NOT: {
        Register tos = frame_->PopToRegister();
        JumpTarget not_smi_label;
        JumpTarget continue_label;
        // Smi check.
        __ tst(tos, Operand(kSmiTagMask));
        not_smi_label.Branch(ne);

        // Smi fast case: ~smi is computed inline by inverting all bits
        // and then clearing the (inverted) tag bit again.
        __ mvn(tos, Operand(tos));
        __ bic(tos, tos, Operand(kSmiTagMask));  // Bit-clear inverted smi-tag.
        frame_->EmitPush(tos);
        // The fast case is the first to jump to the continue label, so it gets
        // to decide the virtual frame layout.
        continue_label.Jump();

        // Slow case: call the stub with the operand in r0.
        not_smi_label.Bind();
        frame_->SpillAll();
        __ Move(r0, tos);
        GenericUnaryOpStub stub(Token::BIT_NOT,
                                overwrite,
                                NO_UNARY_SMI_CODE_IN_STUB);
        frame_->CallStub(&stub, 0);
        frame_->EmitPush(r0);

        continue_label.Bind();
        break;
      }

      case Token::VOID:
        // void <expr> evaluates the operand and yields undefined.
        frame_->Drop();
        frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
        break;

      case Token::ADD: {
        Register tos = frame_->Peek();
        // Smi check.  Unary plus is a no-op on numbers; smis pass
        // through untouched, everything else goes to TO_NUMBER.
        JumpTarget continue_label;
        __ tst(tos, Operand(kSmiTagMask));
        continue_label.Branch(eq);

        frame_->InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS, 1);
        frame_->EmitPush(r0);

        continue_label.Bind();
        break;
      }
      default:
        UNREACHABLE();
    }
  }
  ASSERT(!has_valid_frame() ||
         (has_cc() && frame_->height() == original_height) ||
         (!has_cc() && frame_->height() == original_height + 1));
}
5965
5966
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08005967class DeferredCountOperation: public DeferredCode {
5968 public:
5969 DeferredCountOperation(Register value,
5970 bool is_increment,
5971 bool is_postfix,
5972 int target_size)
5973 : value_(value),
5974 is_increment_(is_increment),
5975 is_postfix_(is_postfix),
5976 target_size_(target_size) {}
5977
5978 virtual void Generate() {
5979 VirtualFrame copied_frame(*frame_state()->frame());
5980
5981 Label slow;
5982 // Check for smi operand.
5983 __ tst(value_, Operand(kSmiTagMask));
5984 __ b(ne, &slow);
5985
5986 // Revert optimistic increment/decrement.
5987 if (is_increment_) {
5988 __ sub(value_, value_, Operand(Smi::FromInt(1)));
5989 } else {
5990 __ add(value_, value_, Operand(Smi::FromInt(1)));
5991 }
5992
5993 // Slow case: Convert to number. At this point the
5994 // value to be incremented is in the value register..
5995 __ bind(&slow);
5996
5997 // Convert the operand to a number.
5998 copied_frame.EmitPush(value_);
5999
6000 copied_frame.InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS, 1);
6001
6002 if (is_postfix_) {
6003 // Postfix: store to result (on the stack).
6004 __ str(r0, MemOperand(sp, target_size_ * kPointerSize));
6005 }
6006
6007 copied_frame.EmitPush(r0);
6008 copied_frame.EmitPush(Operand(Smi::FromInt(1)));
6009
6010 if (is_increment_) {
6011 copied_frame.CallRuntime(Runtime::kNumberAdd, 2);
6012 } else {
6013 copied_frame.CallRuntime(Runtime::kNumberSub, 2);
6014 }
6015
6016 __ Move(value_, r0);
6017
6018 copied_frame.MergeTo(frame_state()->frame());
6019 }
6020
6021 private:
6022 Register value_;
6023 bool is_increment_;
6024 bool is_postfix_;
6025 int target_size_;
6026};
6027
6028
Steve Blocka7e24c12009-10-30 11:49:00 +00006029void CodeGenerator::VisitCountOperation(CountOperation* node) {
6030#ifdef DEBUG
6031 int original_height = frame_->height();
6032#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00006033 Comment cmnt(masm_, "[ CountOperation");
Steve Block8defd9f2010-07-08 12:39:36 +01006034 VirtualFrame::RegisterAllocationScope scope(this);
Steve Blocka7e24c12009-10-30 11:49:00 +00006035
6036 bool is_postfix = node->is_postfix();
6037 bool is_increment = node->op() == Token::INC;
6038
6039 Variable* var = node->expression()->AsVariableProxy()->AsVariable();
6040 bool is_const = (var != NULL && var->mode() == Variable::CONST);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006041 bool is_slot = (var != NULL && var->mode() == Variable::VAR);
Steve Blocka7e24c12009-10-30 11:49:00 +00006042
Kristian Monsen0d5e1162010-09-30 15:31:59 +01006043 if (!is_const && is_slot && type_info(var->AsSlot()).IsSmi()) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006044 // The type info declares that this variable is always a Smi. That
6045 // means it is a Smi both before and after the increment/decrement.
6046 // Lets make use of that to make a very minimal count.
6047 Reference target(this, node->expression(), !is_const);
6048 ASSERT(!target.is_illegal());
6049 target.GetValue(); // Pushes the value.
6050 Register value = frame_->PopToRegister();
6051 if (is_postfix) frame_->EmitPush(value);
6052 if (is_increment) {
6053 __ add(value, value, Operand(Smi::FromInt(1)));
6054 } else {
6055 __ sub(value, value, Operand(Smi::FromInt(1)));
6056 }
6057 frame_->EmitPush(value);
Steve Block8defd9f2010-07-08 12:39:36 +01006058 target.SetValue(NOT_CONST_INIT, LIKELY_SMI);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006059 if (is_postfix) frame_->Pop();
6060 ASSERT_EQ(original_height + 1, frame_->height());
6061 return;
6062 }
6063
6064 // If it's a postfix expression and its result is not ignored and the
6065 // reference is non-trivial, then push a placeholder on the stack now
6066 // to hold the result of the expression.
6067 bool placeholder_pushed = false;
6068 if (!is_slot && is_postfix) {
Kristian Monsen25f61362010-05-21 11:50:48 +01006069 frame_->EmitPush(Operand(Smi::FromInt(0)));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006070 placeholder_pushed = true;
Steve Blocka7e24c12009-10-30 11:49:00 +00006071 }
6072
Leon Clarked91b9f72010-01-27 17:25:45 +00006073 // A constant reference is not saved to, so a constant reference is not a
6074 // compound assignment reference.
6075 { Reference target(this, node->expression(), !is_const);
Steve Blocka7e24c12009-10-30 11:49:00 +00006076 if (target.is_illegal()) {
6077 // Spoof the virtual frame to have the expected height (one higher
6078 // than on entry).
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006079 if (!placeholder_pushed) frame_->EmitPush(Operand(Smi::FromInt(0)));
Steve Block6ded16b2010-05-10 14:33:55 +01006080 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00006081 return;
6082 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006083
Kristian Monsen25f61362010-05-21 11:50:48 +01006084 // This pushes 0, 1 or 2 words on the object to be used later when updating
6085 // the target. It also pushes the current value of the target.
Steve Block6ded16b2010-05-10 14:33:55 +01006086 target.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00006087
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006088 bool value_is_known_smi = frame_->KnownSmiAt(0);
Kristian Monsen25f61362010-05-21 11:50:48 +01006089 Register value = frame_->PopToRegister();
Steve Blocka7e24c12009-10-30 11:49:00 +00006090
6091 // Postfix: Store the old value as the result.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006092 if (placeholder_pushed) {
Kristian Monsen25f61362010-05-21 11:50:48 +01006093 frame_->SetElementAt(value, target.size());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006094 } else if (is_postfix) {
6095 frame_->EmitPush(value);
6096 __ mov(VirtualFrame::scratch0(), value);
6097 value = VirtualFrame::scratch0();
Steve Blocka7e24c12009-10-30 11:49:00 +00006098 }
6099
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006100 // We can't use any type information here since the virtual frame from the
6101 // deferred code may have lost information and we can't merge a virtual
6102 // frame with less specific type knowledge to a virtual frame with more
6103 // specific knowledge that has already used that specific knowledge to
6104 // generate code.
6105 frame_->ForgetTypeInfo();
6106
6107 // The constructor here will capture the current virtual frame and use it to
6108 // merge to after the deferred code has run. No virtual frame changes are
6109 // allowed from here until the 'BindExit' below.
6110 DeferredCode* deferred =
6111 new DeferredCountOperation(value,
6112 is_increment,
6113 is_postfix,
6114 target.size());
6115 if (!value_is_known_smi) {
6116 // Check for smi operand.
6117 __ tst(value, Operand(kSmiTagMask));
6118
6119 deferred->Branch(ne);
6120 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006121
Steve Blocka7e24c12009-10-30 11:49:00 +00006122 // Perform optimistic increment/decrement.
6123 if (is_increment) {
Kristian Monsen25f61362010-05-21 11:50:48 +01006124 __ add(value, value, Operand(Smi::FromInt(1)), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00006125 } else {
Kristian Monsen25f61362010-05-21 11:50:48 +01006126 __ sub(value, value, Operand(Smi::FromInt(1)), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00006127 }
6128
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006129 // If increment/decrement overflows, go to deferred code.
6130 deferred->Branch(vs);
Steve Blocka7e24c12009-10-30 11:49:00 +00006131
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006132 deferred->BindExit();
Steve Blocka7e24c12009-10-30 11:49:00 +00006133
Steve Blocka7e24c12009-10-30 11:49:00 +00006134 // Store the new value in the target if not const.
Kristian Monsen25f61362010-05-21 11:50:48 +01006135 // At this point the answer is in the value register.
Kristian Monsen25f61362010-05-21 11:50:48 +01006136 frame_->EmitPush(value);
6137 // Set the target with the result, leaving the result on
6138 // top of the stack. Removes the target from the stack if
6139 // it has a non-zero size.
Steve Block8defd9f2010-07-08 12:39:36 +01006140 if (!is_const) target.SetValue(NOT_CONST_INIT, LIKELY_SMI);
Steve Blocka7e24c12009-10-30 11:49:00 +00006141 }
6142
6143 // Postfix: Discard the new value and use the old.
Kristian Monsen25f61362010-05-21 11:50:48 +01006144 if (is_postfix) frame_->Pop();
Steve Block6ded16b2010-05-10 14:33:55 +01006145 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00006146}
6147
6148
Steve Block6ded16b2010-05-10 14:33:55 +01006149void CodeGenerator::GenerateLogicalBooleanOperation(BinaryOperation* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00006150 // According to ECMA-262 section 11.11, page 58, the binary logical
6151 // operators must yield the result of one of the two expressions
6152 // before any ToBoolean() conversions. This means that the value
6153 // produced by a && or || operator is not necessarily a boolean.
6154
6155 // NOTE: If the left hand side produces a materialized value (not in
6156 // the CC register), we force the right hand side to do the
6157 // same. This is necessary because we may have to branch to the exit
6158 // after evaluating the left hand side (due to the shortcut
6159 // semantics), but the compiler must (statically) know if the result
6160 // of compiling the binary operation is materialized or not.
Steve Block6ded16b2010-05-10 14:33:55 +01006161 if (node->op() == Token::AND) {
Steve Blocka7e24c12009-10-30 11:49:00 +00006162 JumpTarget is_true;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006163 LoadCondition(node->left(), &is_true, false_target(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00006164 if (has_valid_frame() && !has_cc()) {
6165 // The left-hand side result is on top of the virtual frame.
6166 JumpTarget pop_and_continue;
6167 JumpTarget exit;
6168
Leon Clarkef7060e22010-06-03 12:02:55 +01006169 frame_->Dup();
Steve Blocka7e24c12009-10-30 11:49:00 +00006170 // Avoid popping the result if it converts to 'false' using the
6171 // standard ToBoolean() conversion as described in ECMA-262,
6172 // section 9.2, page 30.
6173 ToBoolean(&pop_and_continue, &exit);
6174 Branch(false, &exit);
6175
6176 // Pop the result of evaluating the first part.
6177 pop_and_continue.Bind();
Leon Clarkef7060e22010-06-03 12:02:55 +01006178 frame_->Pop();
Steve Blocka7e24c12009-10-30 11:49:00 +00006179
6180 // Evaluate right side expression.
6181 is_true.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006182 Load(node->right());
Steve Blocka7e24c12009-10-30 11:49:00 +00006183
6184 // Exit (always with a materialized value).
6185 exit.Bind();
6186 } else if (has_cc() || is_true.is_linked()) {
6187 // The left-hand side is either (a) partially compiled to
6188 // control flow with a final branch left to emit or (b) fully
6189 // compiled to control flow and possibly true.
6190 if (has_cc()) {
6191 Branch(false, false_target());
6192 }
6193 is_true.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006194 LoadCondition(node->right(), true_target(), false_target(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00006195 } else {
6196 // Nothing to do.
6197 ASSERT(!has_valid_frame() && !has_cc() && !is_true.is_linked());
6198 }
6199
Steve Block6ded16b2010-05-10 14:33:55 +01006200 } else {
6201 ASSERT(node->op() == Token::OR);
Steve Blocka7e24c12009-10-30 11:49:00 +00006202 JumpTarget is_false;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006203 LoadCondition(node->left(), true_target(), &is_false, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00006204 if (has_valid_frame() && !has_cc()) {
6205 // The left-hand side result is on top of the virtual frame.
6206 JumpTarget pop_and_continue;
6207 JumpTarget exit;
6208
Leon Clarkef7060e22010-06-03 12:02:55 +01006209 frame_->Dup();
Steve Blocka7e24c12009-10-30 11:49:00 +00006210 // Avoid popping the result if it converts to 'true' using the
6211 // standard ToBoolean() conversion as described in ECMA-262,
6212 // section 9.2, page 30.
6213 ToBoolean(&exit, &pop_and_continue);
6214 Branch(true, &exit);
6215
6216 // Pop the result of evaluating the first part.
6217 pop_and_continue.Bind();
Leon Clarkef7060e22010-06-03 12:02:55 +01006218 frame_->Pop();
Steve Blocka7e24c12009-10-30 11:49:00 +00006219
6220 // Evaluate right side expression.
6221 is_false.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006222 Load(node->right());
Steve Blocka7e24c12009-10-30 11:49:00 +00006223
6224 // Exit (always with a materialized value).
6225 exit.Bind();
6226 } else if (has_cc() || is_false.is_linked()) {
6227 // The left-hand side is either (a) partially compiled to
6228 // control flow with a final branch left to emit or (b) fully
6229 // compiled to control flow and possibly false.
6230 if (has_cc()) {
6231 Branch(true, true_target());
6232 }
6233 is_false.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006234 LoadCondition(node->right(), true_target(), false_target(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00006235 } else {
6236 // Nothing to do.
6237 ASSERT(!has_valid_frame() && !has_cc() && !is_false.is_linked());
6238 }
Steve Block6ded16b2010-05-10 14:33:55 +01006239 }
6240}
Steve Blocka7e24c12009-10-30 11:49:00 +00006241
Steve Block6ded16b2010-05-10 14:33:55 +01006242
// Emits code for binary operators.  && and || are delegated to
// GenerateLogicalBooleanOperation; arithmetic/bitwise operators take a
// specialized SmiOperation path when one operand is a smi literal, and
// otherwise go through GenericBinaryOperation.  The overwrite mode
// tells the stub which operand object may be reused for the result.
void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ BinaryOperation");

  if (node->op() == Token::AND || node->op() == Token::OR) {
    GenerateLogicalBooleanOperation(node);
  } else {
    // Optimize for the case where (at least) one of the expressions
    // is a literal small integer.
    Literal* lliteral = node->left()->AsLiteral();
    Literal* rliteral = node->right()->AsLiteral();
    // NOTE: The code below assumes that the slow cases (calls to runtime)
    // never return a constant/immutable object.
    bool overwrite_left = node->left()->ResultOverwriteAllowed();
    bool overwrite_right = node->right()->ResultOverwriteAllowed();

    if (rliteral != NULL && rliteral->handle()->IsSmi()) {
      VirtualFrame::RegisterAllocationScope scope(this);
      Load(node->left());
      // A smi operand cannot be overwritten in place.
      if (frame_->KnownSmiAt(0)) overwrite_left = false;
      SmiOperation(node->op(),
                   rliteral->handle(),
                   false,
                   overwrite_left ? OVERWRITE_LEFT : NO_OVERWRITE);
    } else if (lliteral != NULL && lliteral->handle()->IsSmi()) {
      VirtualFrame::RegisterAllocationScope scope(this);
      Load(node->right());
      // A smi operand cannot be overwritten in place.
      if (frame_->KnownSmiAt(0)) overwrite_right = false;
      SmiOperation(node->op(),
                   lliteral->handle(),
                   true,
                   overwrite_right ? OVERWRITE_RIGHT : NO_OVERWRITE);
    } else {
      // Only generate the inline smi fast path inside loops, and never
      // when a literal operand is known to be non-smi.
      GenerateInlineSmi inline_smi =
          loop_nesting() > 0 ? GENERATE_INLINE_SMI : DONT_GENERATE_INLINE_SMI;
      if (lliteral != NULL) {
        ASSERT(!lliteral->handle()->IsSmi());
        inline_smi = DONT_GENERATE_INLINE_SMI;
      }
      if (rliteral != NULL) {
        ASSERT(!rliteral->handle()->IsSmi());
        inline_smi = DONT_GENERATE_INLINE_SMI;
      }
      VirtualFrame::RegisterAllocationScope scope(this);
      OverwriteMode overwrite_mode = NO_OVERWRITE;
      if (overwrite_left) {
        overwrite_mode = OVERWRITE_LEFT;
      } else if (overwrite_right) {
        overwrite_mode = OVERWRITE_RIGHT;
      }
      Load(node->left());
      Load(node->right());
      GenericBinaryOperation(node->op(), overwrite_mode, inline_smi);
    }
  }
  ASSERT(!has_valid_frame() ||
         (has_cc() && frame_->height() == original_height) ||
         (!has_cc() && frame_->height() == original_height + 1));
}
6304
6305
6306void CodeGenerator::VisitThisFunction(ThisFunction* node) {
6307#ifdef DEBUG
6308 int original_height = frame_->height();
6309#endif
Leon Clarkef7060e22010-06-03 12:02:55 +01006310 frame_->EmitPush(MemOperand(frame_->Function()));
Steve Block6ded16b2010-05-10 14:33:55 +01006311 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00006312}
6313
6314
6315void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
6316#ifdef DEBUG
6317 int original_height = frame_->height();
6318#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00006319 Comment cmnt(masm_, "[ CompareOperation");
6320
Steve Block6ded16b2010-05-10 14:33:55 +01006321 VirtualFrame::RegisterAllocationScope nonspilled_scope(this);
6322
Steve Blocka7e24c12009-10-30 11:49:00 +00006323 // Get the expressions from the node.
6324 Expression* left = node->left();
6325 Expression* right = node->right();
6326 Token::Value op = node->op();
6327
Steve Blocka7e24c12009-10-30 11:49:00 +00006328 // To make typeof testing for natives implemented in JavaScript really
6329 // efficient, we generate special code for expressions of the form:
6330 // 'typeof <expression> == <string>'.
6331 UnaryOperation* operation = left->AsUnaryOperation();
6332 if ((op == Token::EQ || op == Token::EQ_STRICT) &&
6333 (operation != NULL && operation->op() == Token::TYPEOF) &&
6334 (right->AsLiteral() != NULL &&
6335 right->AsLiteral()->handle()->IsString())) {
6336 Handle<String> check(String::cast(*right->AsLiteral()->handle()));
6337
Steve Block6ded16b2010-05-10 14:33:55 +01006338 // Load the operand, move it to a register.
Steve Blocka7e24c12009-10-30 11:49:00 +00006339 LoadTypeofExpression(operation->expression());
Steve Block6ded16b2010-05-10 14:33:55 +01006340 Register tos = frame_->PopToRegister();
6341
Steve Block6ded16b2010-05-10 14:33:55 +01006342 Register scratch = VirtualFrame::scratch0();
Steve Blocka7e24c12009-10-30 11:49:00 +00006343
6344 if (check->Equals(Heap::number_symbol())) {
Steve Block6ded16b2010-05-10 14:33:55 +01006345 __ tst(tos, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00006346 true_target()->Branch(eq);
Steve Block6ded16b2010-05-10 14:33:55 +01006347 __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00006348 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01006349 __ cmp(tos, ip);
Steve Blocka7e24c12009-10-30 11:49:00 +00006350 cc_reg_ = eq;
6351
6352 } else if (check->Equals(Heap::string_symbol())) {
Steve Block6ded16b2010-05-10 14:33:55 +01006353 __ tst(tos, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00006354 false_target()->Branch(eq);
6355
Steve Block6ded16b2010-05-10 14:33:55 +01006356 __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00006357
6358 // It can be an undetectable string object.
Steve Block6ded16b2010-05-10 14:33:55 +01006359 __ ldrb(scratch, FieldMemOperand(tos, Map::kBitFieldOffset));
6360 __ and_(scratch, scratch, Operand(1 << Map::kIsUndetectable));
6361 __ cmp(scratch, Operand(1 << Map::kIsUndetectable));
Steve Blocka7e24c12009-10-30 11:49:00 +00006362 false_target()->Branch(eq);
6363
Steve Block6ded16b2010-05-10 14:33:55 +01006364 __ ldrb(scratch, FieldMemOperand(tos, Map::kInstanceTypeOffset));
6365 __ cmp(scratch, Operand(FIRST_NONSTRING_TYPE));
Steve Blocka7e24c12009-10-30 11:49:00 +00006366 cc_reg_ = lt;
6367
6368 } else if (check->Equals(Heap::boolean_symbol())) {
6369 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01006370 __ cmp(tos, ip);
Steve Blocka7e24c12009-10-30 11:49:00 +00006371 true_target()->Branch(eq);
6372 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01006373 __ cmp(tos, ip);
Steve Blocka7e24c12009-10-30 11:49:00 +00006374 cc_reg_ = eq;
6375
6376 } else if (check->Equals(Heap::undefined_symbol())) {
6377 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01006378 __ cmp(tos, ip);
Steve Blocka7e24c12009-10-30 11:49:00 +00006379 true_target()->Branch(eq);
6380
Steve Block6ded16b2010-05-10 14:33:55 +01006381 __ tst(tos, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00006382 false_target()->Branch(eq);
6383
6384 // It can be an undetectable object.
Steve Block6ded16b2010-05-10 14:33:55 +01006385 __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
6386 __ ldrb(scratch, FieldMemOperand(tos, Map::kBitFieldOffset));
6387 __ and_(scratch, scratch, Operand(1 << Map::kIsUndetectable));
6388 __ cmp(scratch, Operand(1 << Map::kIsUndetectable));
Steve Blocka7e24c12009-10-30 11:49:00 +00006389
6390 cc_reg_ = eq;
6391
6392 } else if (check->Equals(Heap::function_symbol())) {
Steve Block6ded16b2010-05-10 14:33:55 +01006393 __ tst(tos, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00006394 false_target()->Branch(eq);
Steve Block6ded16b2010-05-10 14:33:55 +01006395 Register map_reg = scratch;
6396 __ CompareObjectType(tos, map_reg, tos, JS_FUNCTION_TYPE);
Steve Blockd0582a62009-12-15 09:54:21 +00006397 true_target()->Branch(eq);
6398 // Regular expressions are callable so typeof == 'function'.
Steve Block6ded16b2010-05-10 14:33:55 +01006399 __ CompareInstanceType(map_reg, tos, JS_REGEXP_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00006400 cc_reg_ = eq;
6401
6402 } else if (check->Equals(Heap::object_symbol())) {
Steve Block6ded16b2010-05-10 14:33:55 +01006403 __ tst(tos, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00006404 false_target()->Branch(eq);
6405
Steve Blocka7e24c12009-10-30 11:49:00 +00006406 __ LoadRoot(ip, Heap::kNullValueRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01006407 __ cmp(tos, ip);
Steve Blocka7e24c12009-10-30 11:49:00 +00006408 true_target()->Branch(eq);
6409
Steve Block6ded16b2010-05-10 14:33:55 +01006410 Register map_reg = scratch;
6411 __ CompareObjectType(tos, map_reg, tos, JS_REGEXP_TYPE);
Steve Blockd0582a62009-12-15 09:54:21 +00006412 false_target()->Branch(eq);
6413
Steve Blocka7e24c12009-10-30 11:49:00 +00006414 // It can be an undetectable object.
Steve Block6ded16b2010-05-10 14:33:55 +01006415 __ ldrb(tos, FieldMemOperand(map_reg, Map::kBitFieldOffset));
6416 __ and_(tos, tos, Operand(1 << Map::kIsUndetectable));
6417 __ cmp(tos, Operand(1 << Map::kIsUndetectable));
Steve Blocka7e24c12009-10-30 11:49:00 +00006418 false_target()->Branch(eq);
6419
Steve Block6ded16b2010-05-10 14:33:55 +01006420 __ ldrb(tos, FieldMemOperand(map_reg, Map::kInstanceTypeOffset));
6421 __ cmp(tos, Operand(FIRST_JS_OBJECT_TYPE));
Steve Blocka7e24c12009-10-30 11:49:00 +00006422 false_target()->Branch(lt);
Steve Block6ded16b2010-05-10 14:33:55 +01006423 __ cmp(tos, Operand(LAST_JS_OBJECT_TYPE));
Steve Blocka7e24c12009-10-30 11:49:00 +00006424 cc_reg_ = le;
6425
6426 } else {
6427 // Uncommon case: typeof testing against a string literal that is
6428 // never returned from the typeof operator.
6429 false_target()->Jump();
6430 }
6431 ASSERT(!has_valid_frame() ||
6432 (has_cc() && frame_->height() == original_height));
6433 return;
6434 }
6435
6436 switch (op) {
6437 case Token::EQ:
6438 Comparison(eq, left, right, false);
6439 break;
6440
6441 case Token::LT:
6442 Comparison(lt, left, right);
6443 break;
6444
6445 case Token::GT:
6446 Comparison(gt, left, right);
6447 break;
6448
6449 case Token::LTE:
6450 Comparison(le, left, right);
6451 break;
6452
6453 case Token::GTE:
6454 Comparison(ge, left, right);
6455 break;
6456
6457 case Token::EQ_STRICT:
6458 Comparison(eq, left, right, true);
6459 break;
6460
6461 case Token::IN: {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006462 Load(left);
6463 Load(right);
Steve Blockd0582a62009-12-15 09:54:21 +00006464 frame_->InvokeBuiltin(Builtins::IN, CALL_JS, 2);
Steve Blocka7e24c12009-10-30 11:49:00 +00006465 frame_->EmitPush(r0);
6466 break;
6467 }
6468
6469 case Token::INSTANCEOF: {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006470 Load(left);
6471 Load(right);
Ben Murdochb0fe1622011-05-05 13:52:32 +01006472 InstanceofStub stub(InstanceofStub::kNoFlags);
Steve Blocka7e24c12009-10-30 11:49:00 +00006473 frame_->CallStub(&stub, 2);
6474 // At this point if instanceof succeeded then r0 == 0.
6475 __ tst(r0, Operand(r0));
6476 cc_reg_ = eq;
6477 break;
6478 }
6479
6480 default:
6481 UNREACHABLE();
6482 }
6483 ASSERT((has_cc() && frame_->height() == original_height) ||
6484 (!has_cc() && frame_->height() == original_height + 1));
6485}
6486
6487
// Emits code for the CompareToNull node: compares the expression's value
// against null and leaves the answer in the condition codes (cc_reg_ = eq)
// for the current true/false branch targets.
void CodeGenerator::VisitCompareToNull(CompareToNull* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ CompareToNull");

  Load(node->expression());
  Register tos = frame_->PopToRegister();
  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(tos, ip);

  // The 'null' value is only equal to 'undefined' if using non-strict
  // comparisons.
  if (!node->is_strict()) {
    true_target()->Branch(eq);
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(tos, Operand(ip));
    true_target()->Branch(eq);

    // Smis are neither null nor undefined and are not undetectable.
    __ tst(tos, Operand(kSmiTagMask));
    false_target()->Branch(eq);

    // It can be an undetectable object: check the map's bit field for the
    // undetectable bit and compare equal if it is set.
    __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
    __ ldrb(tos, FieldMemOperand(tos, Map::kBitFieldOffset));
    __ and_(tos, tos, Operand(1 << Map::kIsUndetectable));
    __ cmp(tos, Operand(1 << Map::kIsUndetectable));
  }

  // The result is communicated through the condition codes, not a pushed
  // value; the frame height must therefore be unchanged.
  cc_reg_ = eq;
  ASSERT(has_cc() && frame_->height() == original_height);
}
6520
6521
Steve Block6ded16b2010-05-10 14:33:55 +01006522class DeferredReferenceGetNamedValue: public DeferredCode {
6523 public:
Leon Clarkef7060e22010-06-03 12:02:55 +01006524 explicit DeferredReferenceGetNamedValue(Register receiver,
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006525 Handle<String> name,
6526 bool is_contextual)
6527 : receiver_(receiver),
6528 name_(name),
6529 is_contextual_(is_contextual),
6530 is_dont_delete_(false) {
6531 set_comment(is_contextual
6532 ? "[ DeferredReferenceGetNamedValue (contextual)"
6533 : "[ DeferredReferenceGetNamedValue");
Steve Block6ded16b2010-05-10 14:33:55 +01006534 }
6535
6536 virtual void Generate();
6537
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006538 void set_is_dont_delete(bool value) {
6539 ASSERT(is_contextual_);
6540 is_dont_delete_ = value;
6541 }
6542
Steve Block6ded16b2010-05-10 14:33:55 +01006543 private:
Leon Clarkef7060e22010-06-03 12:02:55 +01006544 Register receiver_;
Steve Block6ded16b2010-05-10 14:33:55 +01006545 Handle<String> name_;
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006546 bool is_contextual_;
6547 bool is_dont_delete_;
Steve Block6ded16b2010-05-10 14:33:55 +01006548};
6549
6550
// Convention for this is that on entry the receiver is in a register that
// is not used by the stack. On exit the answer is found in that same
// register and the stack has the same height.
void DeferredReferenceGetNamedValue::Generate() {
#ifdef DEBUG
  int expected_height = frame_state()->frame()->height();
#endif
  // Work on a spilled copy of the frame so the entry frame can be restored
  // (merged back to) after the IC call.
  VirtualFrame copied_frame(*frame_state()->frame());
  copied_frame.SpillAll();

  Register scratch1 = VirtualFrame::scratch0();
  Register scratch2 = VirtualFrame::scratch1();
  ASSERT(!receiver_.is(scratch1) && !receiver_.is(scratch2));
  // Taking the deferred path means the inline load missed: fix up counters.
  __ DecrementCounter(&Counters::named_load_inline, 1, scratch1, scratch2);
  __ IncrementCounter(&Counters::named_load_inline_miss, 1, scratch1, scratch2);

  // Ensure receiver in r0 and name in r2 to match load ic calling convention.
  __ Move(r0, receiver_);
  __ mov(r2, Operand(name_));

  // The rest of the instructions in the deferred code must be together.
  { Assembler::BlockConstPoolScope block_const_pool(masm_);
    Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
    RelocInfo::Mode mode = is_contextual_
        ? RelocInfo::CODE_TARGET_CONTEXT
        : RelocInfo::CODE_TARGET;
    __ Call(ic, mode);
    // We must mark the code just after the call with the correct marker; the
    // IC patching code keys off this marker to know what kind of inlined
    // access follows the call site.
    MacroAssembler::NopMarkerTypes code_marker;
    if (is_contextual_) {
      code_marker = is_dont_delete_
                    ? MacroAssembler::PROPERTY_ACCESS_INLINED_CONTEXT_DONT_DELETE
                    : MacroAssembler::PROPERTY_ACCESS_INLINED_CONTEXT;
    } else {
      code_marker = MacroAssembler::PROPERTY_ACCESS_INLINED;
    }
    __ MarkCode(code_marker);

    // At this point the answer is in r0. We move it to the expected register
    // if necessary.
    __ Move(receiver_, r0);

    // Now go back to the frame that we entered with. This will not overwrite
    // the receiver register since that register was not in use when we came
    // in. The instructions emitted by this merge are skipped over by the
    // inline load patching mechanism when looking for the branch instruction
    // that tells it where the code to patch is.
    copied_frame.MergeTo(frame_state()->frame());

    // Block the constant pool for one more instruction after leaving this
    // constant pool block scope to include the branch instruction ending the
    // deferred code.
    __ BlockConstPoolFor(1);
  }
  ASSERT_EQ(expected_height, frame_state()->frame()->height());
}
6607
6608
6609class DeferredReferenceGetKeyedValue: public DeferredCode {
6610 public:
Kristian Monsen25f61362010-05-21 11:50:48 +01006611 DeferredReferenceGetKeyedValue(Register key, Register receiver)
6612 : key_(key), receiver_(receiver) {
Steve Block6ded16b2010-05-10 14:33:55 +01006613 set_comment("[ DeferredReferenceGetKeyedValue");
6614 }
6615
6616 virtual void Generate();
Kristian Monsen25f61362010-05-21 11:50:48 +01006617
6618 private:
6619 Register key_;
6620 Register receiver_;
Steve Block6ded16b2010-05-10 14:33:55 +01006621};
6622
6623
// Takes key and receiver in r0 and r1 or vice versa. Returns result
// in r0.
void DeferredReferenceGetKeyedValue::Generate() {
  ASSERT((key_.is(r0) && receiver_.is(r1)) ||
         (key_.is(r1) && receiver_.is(r0)));

  // Work on a spilled copy of the frame so the entry frame can be restored
  // (merged back to) after the IC call.
  VirtualFrame copied_frame(*frame_state()->frame());
  copied_frame.SpillAll();

  Register scratch1 = VirtualFrame::scratch0();
  Register scratch2 = VirtualFrame::scratch1();
  // Taking the deferred path means the inline load missed: fix up counters.
  __ DecrementCounter(&Counters::keyed_load_inline, 1, scratch1, scratch2);
  __ IncrementCounter(&Counters::keyed_load_inline_miss, 1, scratch1, scratch2);

  // Ensure key in r0 and receiver in r1 to match keyed load ic calling
  // convention.
  if (key_.is(r1)) {
    __ Swap(r0, r1, ip);
  }

  // The rest of the instructions in the deferred code must be together.
  { Assembler::BlockConstPoolScope block_const_pool(masm_);
    // Call keyed load IC. It has the arguments key and receiver in r0 and r1.
    Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
    __ Call(ic, RelocInfo::CODE_TARGET);
    // The call must be followed by a nop instruction to indicate that the
    // keyed load has been inlined.
    __ MarkCode(MacroAssembler::PROPERTY_ACCESS_INLINED);

    // Now go back to the frame that we entered with. This will not overwrite
    // the receiver or key registers since they were not in use when we came
    // in. The instructions emitted by this merge are skipped over by the
    // inline load patching mechanism when looking for the branch instruction
    // that tells it where the code to patch is.
    copied_frame.MergeTo(frame_state()->frame());

    // Block the constant pool for one more instruction after leaving this
    // constant pool block scope to include the branch instruction ending the
    // deferred code.
    __ BlockConstPoolFor(1);
  }
}
6666
6667
6668class DeferredReferenceSetKeyedValue: public DeferredCode {
6669 public:
Leon Clarkef7060e22010-06-03 12:02:55 +01006670 DeferredReferenceSetKeyedValue(Register value,
6671 Register key,
6672 Register receiver)
6673 : value_(value), key_(key), receiver_(receiver) {
Steve Block6ded16b2010-05-10 14:33:55 +01006674 set_comment("[ DeferredReferenceSetKeyedValue");
6675 }
6676
6677 virtual void Generate();
Leon Clarkef7060e22010-06-03 12:02:55 +01006678
6679 private:
6680 Register value_;
6681 Register key_;
6682 Register receiver_;
Steve Block6ded16b2010-05-10 14:33:55 +01006683};
6684
6685
// Slow path for an inlined keyed store: shuffles the operands into the
// keyed store IC calling convention (value in r0, key in r1, receiver in
// r2) and calls the IC, marking the call site for inline-cache patching.
void DeferredReferenceSetKeyedValue::Generate() {
  Register scratch1 = VirtualFrame::scratch0();
  Register scratch2 = VirtualFrame::scratch1();
  // Taking the deferred path means the inline store missed: fix up counters.
  __ DecrementCounter(&Counters::keyed_store_inline, 1, scratch1, scratch2);
  __ IncrementCounter(
      &Counters::keyed_store_inline_miss, 1, scratch1, scratch2);

  // Ensure value in r0, key in r1 and receiver in r2 to match keyed store ic
  // calling convention.
  if (value_.is(r1)) {
    __ Swap(r0, r1, ip);
  }
  ASSERT(receiver_.is(r2));

  // The rest of the instructions in the deferred code must be together.
  { Assembler::BlockConstPoolScope block_const_pool(masm_);
    // Call keyed store IC. It has the arguments value, key and receiver in r0,
    // r1 and r2.
    Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
    __ Call(ic, RelocInfo::CODE_TARGET);
    // The call must be followed by a nop instruction to indicate that the
    // keyed store has been inlined.
    __ MarkCode(MacroAssembler::PROPERTY_ACCESS_INLINED);

    // Block the constant pool for one more instruction after leaving this
    // constant pool block scope to include the branch instruction ending the
    // deferred code.
    __ BlockConstPoolFor(1);
  }
}
6716
6717
Kristian Monsen50ef84f2010-07-29 15:18:00 +01006718class DeferredReferenceSetNamedValue: public DeferredCode {
6719 public:
6720 DeferredReferenceSetNamedValue(Register value,
6721 Register receiver,
6722 Handle<String> name)
6723 : value_(value), receiver_(receiver), name_(name) {
6724 set_comment("[ DeferredReferenceSetNamedValue");
6725 }
6726
6727 virtual void Generate();
6728
6729 private:
6730 Register value_;
6731 Register receiver_;
6732 Handle<String> name_;
6733};
6734
6735
// Takes value in r0, receiver in r1 and returns the result (the
// value) in r0.
void DeferredReferenceSetNamedValue::Generate() {
  // Record the entry frame and spill.
  VirtualFrame copied_frame(*frame_state()->frame());
  copied_frame.SpillAll();

  // Ensure value in r0, receiver in r1 to match store ic calling
  // convention.
  ASSERT(value_.is(r0) && receiver_.is(r1));
  __ mov(r2, Operand(name_));

  // The rest of the instructions in the deferred code must be together.
  { Assembler::BlockConstPoolScope block_const_pool(masm_);
    // Call the store IC. It has the arguments value, name and receiver in r0,
    // r2 and r1.
    Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
    __ Call(ic, RelocInfo::CODE_TARGET);
    // The call must be followed by a nop instruction to indicate that the
    // named store has been inlined.
    __ MarkCode(MacroAssembler::PROPERTY_ACCESS_INLINED);

    // Go back to the frame we entered with. The instructions
    // generated by this merge are skipped over by the inline store
    // patching mechanism when looking for the branch instruction that
    // tells it where the code to patch is.
    copied_frame.MergeTo(frame_state()->frame());

    // Block the constant pool for one more instruction after leaving this
    // constant pool block scope to include the branch instruction ending the
    // deferred code.
    __ BlockConstPoolFor(1);
  }
}
6770
6771
// Consumes the top of stack (the receiver) and pushes the result instead.
// Outside of loops (or in builtins) the load IC is called directly;
// otherwise the in-object (or contextual global cell) load is inlined and
// later patched by the inline cache machinery in ic-arm.cc.
void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
  bool contextual_load_in_builtin =
      is_contextual &&
      (Bootstrapper::IsActive() ||
       (!info_->closure().is_null() && info_->closure()->IsBuiltin()));

  if (scope()->is_global_scope() ||
      loop_nesting() == 0 ||
      contextual_load_in_builtin) {
    Comment cmnt(masm(), "[ Load from named Property");
    // Setup the name register and call load IC.
    frame_->CallLoadIC(name,
                       is_contextual
                           ? RelocInfo::CODE_TARGET_CONTEXT
                           : RelocInfo::CODE_TARGET);
    frame_->EmitPush(r0);  // Push answer.
  } else {
    // Inline the in-object property case.
    Comment cmnt(masm(), is_contextual
                             ? "[ Inlined contextual property load"
                             : "[ Inlined named property load");

    // Counter will be decremented in the deferred code. Placed here to avoid
    // having it in the instruction stream below where patching will occur.
    if (is_contextual) {
      __ IncrementCounter(&Counters::named_load_global_inline, 1,
                          frame_->scratch0(), frame_->scratch1());
    } else {
      __ IncrementCounter(&Counters::named_load_inline, 1,
                          frame_->scratch0(), frame_->scratch1());
    }

    // The following instructions are the inlined load of an in-object
    // property.  Parts of this code are patched, so the exact instructions
    // generated need to be fixed.  Therefore the constant pool is blocked
    // while generating this code.

    // Load the receiver from the stack.
    Register receiver = frame_->PopToRegister();

    DeferredReferenceGetNamedValue* deferred =
        new DeferredReferenceGetNamedValue(receiver, name, is_contextual);

    bool is_dont_delete = false;
    if (is_contextual) {
      if (!info_->closure().is_null()) {
        // When doing lazy compilation we can check if the global cell
        // already exists and use its "don't delete" status as a hint.
        AssertNoAllocation no_gc;
        v8::internal::GlobalObject* global_object =
            info_->closure()->context()->global();
        LookupResult lookup;
        global_object->LocalLookupRealNamedProperty(*name, &lookup);
        if (lookup.IsProperty() && lookup.type() == NORMAL) {
          ASSERT(lookup.holder() == global_object);
          ASSERT(global_object->property_dictionary()->ValueAt(
              lookup.GetDictionaryEntry())->IsJSGlobalPropertyCell());
          is_dont_delete = lookup.IsDontDelete();
        }
      }
      if (is_dont_delete) {
        __ IncrementCounter(&Counters::dont_delete_hint_hit, 1,
                            frame_->scratch0(), frame_->scratch1());
      }
    }

    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      if (!is_contextual) {
        // Check that the receiver is a heap object.
        __ tst(receiver, Operand(kSmiTagMask));
        deferred->Branch(eq);
      }

      // Check for the_hole_value if necessary.
      // Below we rely on the number of instructions generated, and we can't
      // cope with the Check macro which does not generate a fixed number of
      // instructions.
      Label skip, check_the_hole, cont;
      if (FLAG_debug_code && is_contextual && is_dont_delete) {
        __ b(&skip);
        __ bind(&check_the_hole);
        __ Check(ne, "DontDelete cells can't contain the hole");
        __ b(&cont);
        __ bind(&skip);
      }

#ifdef DEBUG
      // Instruction count of the patchable region; branches below adjust it.
      int InlinedNamedLoadInstructions = 5;
      Label check_inlined_codesize;
      masm_->bind(&check_inlined_codesize);
#endif

      Register scratch = VirtualFrame::scratch0();
      Register scratch2 = VirtualFrame::scratch1();

      // Check the map. The null map used below is patched by the inline cache
      // code. Therefore we can't use a LoadRoot call.
      __ ldr(scratch, FieldMemOperand(receiver, HeapObject::kMapOffset));
      __ mov(scratch2, Operand(Factory::null_value()));
      __ cmp(scratch, scratch2);
      deferred->Branch(ne);

      if (is_contextual) {
#ifdef DEBUG
        InlinedNamedLoadInstructions += 1;
#endif
        // Load the (initially invalid) cell and get its value.
        masm()->mov(receiver, Operand(Factory::null_value()));
        __ ldr(receiver,
               FieldMemOperand(receiver, JSGlobalPropertyCell::kValueOffset));

        deferred->set_is_dont_delete(is_dont_delete);

        if (!is_dont_delete) {
#ifdef DEBUG
          InlinedNamedLoadInstructions += 3;
#endif
          // A deletable global may have been deleted: its cell then holds
          // the hole, so fall back to the IC.
          __ cmp(receiver, Operand(Factory::the_hole_value()));
          deferred->Branch(eq);
        } else if (FLAG_debug_code) {
#ifdef DEBUG
          InlinedNamedLoadInstructions += 3;
#endif
          __ cmp(receiver, Operand(Factory::the_hole_value()));
          __ b(&check_the_hole, eq);
          __ bind(&cont);
        }
      } else {
        // Initially use an invalid index. The index will be patched by the
        // inline cache code.
        __ ldr(receiver, MemOperand(receiver, 0));
      }

      // Make sure that the expected number of instructions are generated.
      // If the code before is updated, the offsets in ic-arm.cc
      // LoadIC::PatchInlinedContextualLoad and PatchInlinedLoad need
      // to be updated.
      ASSERT_EQ(InlinedNamedLoadInstructions,
                masm_->InstructionsGeneratedSince(&check_inlined_codesize));
    }

    deferred->BindExit();
    // At this point the receiver register has the result, either from the
    // deferred code or from the inlined code.
    frame_->EmitPush(receiver);
  }
}
6920
6921
// Stores the value on top of the stack into the named property of the
// receiver beneath it.  Contextual stores and stores outside of loops call
// the store IC directly; otherwise the in-object store (including its
// write barrier) is inlined and later patched by the IC machinery.
void CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
#ifdef DEBUG
  // A contextual store consumes only the value; a named store also
  // consumes the receiver.
  int expected_height = frame()->height() - (is_contextual ? 1 : 2);
#endif

  // NOTE(review): 'result' appears unused below — presumably vestigial;
  // confirm Result's constructor/destructor have no side effects before
  // removing.
  Result result;
  if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) {
    frame()->CallStoreIC(name, is_contextual);
  } else {
    // Inline the in-object property case.
    JumpTarget slow, done;

    // Get the value and receiver from the stack.
    frame()->PopToR0();
    Register value = r0;
    frame()->PopToR1();
    Register receiver = r1;

    DeferredReferenceSetNamedValue* deferred =
        new DeferredReferenceSetNamedValue(value, receiver, name);

    // Check that the receiver is a heap object.
    __ tst(receiver, Operand(kSmiTagMask));
    deferred->Branch(eq);

    // The following instructions are the part of the inlined
    // in-object property store code which can be patched. Therefore
    // the exact number of instructions generated must be fixed, so
    // the constant pool is blocked while generating this code.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      Register scratch0 = VirtualFrame::scratch0();
      Register scratch1 = VirtualFrame::scratch1();

      // Check the map. Initially use an invalid map to force a
      // failure. The map check will be patched in the runtime system.
      __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));

#ifdef DEBUG
      Label check_inlined_codesize;
      masm_->bind(&check_inlined_codesize);
#endif
      __ mov(scratch0, Operand(Factory::null_value()));
      __ cmp(scratch0, scratch1);
      deferred->Branch(ne);

      // Initially an invalid offset; patched by the runtime system.
      int offset = 0;
      __ str(value, MemOperand(receiver, offset));

      // Update the write barrier and record its size. We do not use
      // the RecordWrite macro here because we want the offset
      // addition instruction first to make it easy to patch.
      Label record_write_start, record_write_done;
      __ bind(&record_write_start);
      // Add offset into the object.
      __ add(scratch0, receiver, Operand(offset));
      // Test that the object is not in the new space. We cannot set
      // region marks for new space pages.
      __ InNewSpace(receiver, scratch1, eq, &record_write_done);
      // Record the actual write.
      __ RecordWriteHelper(receiver, scratch0, scratch1);
      __ bind(&record_write_done);
      // Clobber all input registers when running with the debug-code flag
      // turned on to provoke errors.
      if (FLAG_debug_code) {
        __ mov(receiver, Operand(BitCast<int32_t>(kZapValue)));
        __ mov(scratch0, Operand(BitCast<int32_t>(kZapValue)));
        __ mov(scratch1, Operand(BitCast<int32_t>(kZapValue)));
      }
      // Check that this is the first inlined write barrier or that
      // this inlined write barrier has the same size as all the other
      // inlined write barriers.
      ASSERT((inlined_write_barrier_size_ == -1) ||
             (inlined_write_barrier_size_ ==
              masm()->InstructionsGeneratedSince(&record_write_start)));
      inlined_write_barrier_size_ =
          masm()->InstructionsGeneratedSince(&record_write_start);

      // Make sure that the expected number of instructions are generated.
      ASSERT_EQ(GetInlinedNamedStoreInstructionsAfterPatch(),
                masm()->InstructionsGeneratedSince(&check_inlined_codesize));
    }
    deferred->BindExit();
  }
  ASSERT_EQ(expected_height, frame()->height());
}
7007
7008
// Loads a keyed property, leaving the result in r0.  Outside of loops the
// keyed load IC is called directly; otherwise the fast-elements load is
// inlined and later patched by the inline cache machinery.
void CodeGenerator::EmitKeyedLoad() {
  if (loop_nesting() == 0) {
    Comment cmnt(masm_, "[ Load from keyed property");
    frame_->CallKeyedLoadIC();
  } else {
    // Inline the keyed load.
    Comment cmnt(masm_, "[ Inlined load from keyed property");

    // Counter will be decremented in the deferred code. Placed here to avoid
    // having it in the instruction stream below where patching will occur.
    __ IncrementCounter(&Counters::keyed_load_inline, 1,
                        frame_->scratch0(), frame_->scratch1());

    // Load the key and receiver from the stack.
    bool key_is_known_smi = frame_->KnownSmiAt(0);
    Register key = frame_->PopToRegister();
    Register receiver = frame_->PopToRegister(key);

    // The deferred code expects key and receiver in registers.
    DeferredReferenceGetKeyedValue* deferred =
        new DeferredReferenceGetKeyedValue(key, receiver);

    // Check that the receiver is a heap object.
    __ tst(receiver, Operand(kSmiTagMask));
    deferred->Branch(eq);

    // The following instructions are the part of the inlined load keyed
    // property code which can be patched. Therefore the exact number of
    // instructions generated need to be fixed, so the constant pool is blocked
    // while generating this code.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      Register scratch1 = VirtualFrame::scratch0();
      Register scratch2 = VirtualFrame::scratch1();
      // Check the map. The null map used below is patched by the inline cache
      // code.
      __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));

      // Check that the key is a smi.
      if (!key_is_known_smi) {
        __ tst(key, Operand(kSmiTagMask));
        deferred->Branch(ne);
      }

#ifdef DEBUG
      Label check_inlined_codesize;
      masm_->bind(&check_inlined_codesize);
#endif
      // The null map forces a miss until the IC patches in the real map.
      __ mov(scratch2, Operand(Factory::null_value()));
      __ cmp(scratch1, scratch2);
      deferred->Branch(ne);

      // Get the elements array from the receiver.
      __ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset));
      __ AssertFastElements(scratch1);

      // Check that key is within bounds. Use unsigned comparison to handle
      // negative keys.
      __ ldr(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset));
      __ cmp(scratch2, key);
      deferred->Branch(ls);  // Unsigned less equal.

      // Load and check that the result is not the hole (key is a smi).
      __ LoadRoot(scratch2, Heap::kTheHoleValueRootIndex);
      __ add(scratch1,
             scratch1,
             Operand(FixedArray::kHeaderSize - kHeapObjectTag));
      __ ldr(scratch1,
             MemOperand(scratch1, key, LSL,
                        kPointerSizeLog2 - (kSmiTagSize + kSmiShiftSize)));
      __ cmp(scratch1, scratch2);
      deferred->Branch(eq);

      __ mov(r0, scratch1);
      // Make sure that the expected number of instructions are generated.
      ASSERT_EQ(GetInlinedKeyedLoadInstructionsAfterPatch(),
                masm_->InstructionsGeneratedSince(&check_inlined_codesize));
    }

    deferred->BindExit();
  }
}
7090
7091
Steve Block8defd9f2010-07-08 12:39:36 +01007092void CodeGenerator::EmitKeyedStore(StaticType* key_type,
7093 WriteBarrierCharacter wb_info) {
Steve Block6ded16b2010-05-10 14:33:55 +01007094 // Generate inlined version of the keyed store if the code is in a loop
7095 // and the key is likely to be a smi.
7096 if (loop_nesting() > 0 && key_type->IsLikelySmi()) {
7097 // Inline the keyed store.
7098 Comment cmnt(masm_, "[ Inlined store to keyed property");
7099
Leon Clarkef7060e22010-06-03 12:02:55 +01007100 Register scratch1 = VirtualFrame::scratch0();
7101 Register scratch2 = VirtualFrame::scratch1();
7102 Register scratch3 = r3;
Steve Block6ded16b2010-05-10 14:33:55 +01007103
7104 // Counter will be decremented in the deferred code. Placed here to avoid
7105 // having it in the instruction stream below where patching will occur.
7106 __ IncrementCounter(&Counters::keyed_store_inline, 1,
Leon Clarkef7060e22010-06-03 12:02:55 +01007107 scratch1, scratch2);
7108
Steve Block8defd9f2010-07-08 12:39:36 +01007109
7110
Leon Clarkef7060e22010-06-03 12:02:55 +01007111 // Load the value, key and receiver from the stack.
Steve Block8defd9f2010-07-08 12:39:36 +01007112 bool value_is_harmless = frame_->KnownSmiAt(0);
7113 if (wb_info == NEVER_NEWSPACE) value_is_harmless = true;
7114 bool key_is_smi = frame_->KnownSmiAt(1);
Leon Clarkef7060e22010-06-03 12:02:55 +01007115 Register value = frame_->PopToRegister();
7116 Register key = frame_->PopToRegister(value);
Steve Block8defd9f2010-07-08 12:39:36 +01007117 VirtualFrame::SpilledScope spilled(frame_);
Leon Clarkef7060e22010-06-03 12:02:55 +01007118 Register receiver = r2;
7119 frame_->EmitPop(receiver);
Steve Block8defd9f2010-07-08 12:39:36 +01007120
7121#ifdef DEBUG
7122 bool we_remembered_the_write_barrier = value_is_harmless;
7123#endif
Leon Clarkef7060e22010-06-03 12:02:55 +01007124
7125 // The deferred code expects value, key and receiver in registers.
7126 DeferredReferenceSetKeyedValue* deferred =
7127 new DeferredReferenceSetKeyedValue(value, key, receiver);
Steve Block6ded16b2010-05-10 14:33:55 +01007128
7129 // Check that the value is a smi. As this inlined code does not set the
7130 // write barrier it is only possible to store smi values.
Steve Block8defd9f2010-07-08 12:39:36 +01007131 if (!value_is_harmless) {
7132 // If the value is not likely to be a Smi then let's test the fixed array
7133 // for new space instead. See below.
7134 if (wb_info == LIKELY_SMI) {
7135 __ tst(value, Operand(kSmiTagMask));
7136 deferred->Branch(ne);
7137#ifdef DEBUG
7138 we_remembered_the_write_barrier = true;
7139#endif
7140 }
7141 }
Steve Block6ded16b2010-05-10 14:33:55 +01007142
Steve Block8defd9f2010-07-08 12:39:36 +01007143 if (!key_is_smi) {
7144 // Check that the key is a smi.
7145 __ tst(key, Operand(kSmiTagMask));
7146 deferred->Branch(ne);
7147 }
Steve Block6ded16b2010-05-10 14:33:55 +01007148
7149 // Check that the receiver is a heap object.
Leon Clarkef7060e22010-06-03 12:02:55 +01007150 __ tst(receiver, Operand(kSmiTagMask));
Steve Block6ded16b2010-05-10 14:33:55 +01007151 deferred->Branch(eq);
7152
7153 // Check that the receiver is a JSArray.
Leon Clarkef7060e22010-06-03 12:02:55 +01007154 __ CompareObjectType(receiver, scratch1, scratch1, JS_ARRAY_TYPE);
Steve Block6ded16b2010-05-10 14:33:55 +01007155 deferred->Branch(ne);
7156
7157 // Check that the key is within bounds. Both the key and the length of
7158 // the JSArray are smis. Use unsigned comparison to handle negative keys.
Leon Clarkef7060e22010-06-03 12:02:55 +01007159 __ ldr(scratch1, FieldMemOperand(receiver, JSArray::kLengthOffset));
7160 __ cmp(scratch1, key);
Steve Block6ded16b2010-05-10 14:33:55 +01007161 deferred->Branch(ls); // Unsigned less equal.
7162
Steve Block8defd9f2010-07-08 12:39:36 +01007163 // Get the elements array from the receiver.
7164 __ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset));
7165 if (!value_is_harmless && wb_info != LIKELY_SMI) {
7166 Label ok;
7167 __ and_(scratch2, scratch1, Operand(ExternalReference::new_space_mask()));
7168 __ cmp(scratch2, Operand(ExternalReference::new_space_start()));
7169 __ tst(value, Operand(kSmiTagMask), ne);
7170 deferred->Branch(ne);
7171#ifdef DEBUG
7172 we_remembered_the_write_barrier = true;
7173#endif
7174 }
7175 // Check that the elements array is not a dictionary.
7176 __ ldr(scratch2, FieldMemOperand(scratch1, JSObject::kMapOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01007177 // The following instructions are the part of the inlined store keyed
7178 // property code which can be patched. Therefore the exact number of
7179 // instructions generated need to be fixed, so the constant pool is blocked
7180 // while generating this code.
Steve Block6ded16b2010-05-10 14:33:55 +01007181 { Assembler::BlockConstPoolScope block_const_pool(masm_);
Steve Block8defd9f2010-07-08 12:39:36 +01007182#ifdef DEBUG
7183 Label check_inlined_codesize;
7184 masm_->bind(&check_inlined_codesize);
7185#endif
7186
Steve Block6ded16b2010-05-10 14:33:55 +01007187 // Read the fixed array map from the constant pool (not from the root
7188 // array) so that the value can be patched. When debugging, we patch this
7189 // comparison to always fail so that we will hit the IC call in the
7190 // deferred code which will allow the debugger to break for fast case
7191 // stores.
Leon Clarkef7060e22010-06-03 12:02:55 +01007192 __ mov(scratch3, Operand(Factory::fixed_array_map()));
7193 __ cmp(scratch2, scratch3);
Steve Block6ded16b2010-05-10 14:33:55 +01007194 deferred->Branch(ne);
7195
7196 // Store the value.
Leon Clarkef7060e22010-06-03 12:02:55 +01007197 __ add(scratch1, scratch1,
7198 Operand(FixedArray::kHeaderSize - kHeapObjectTag));
7199 __ str(value,
7200 MemOperand(scratch1, key, LSL,
7201 kPointerSizeLog2 - (kSmiTagSize + kSmiShiftSize)));
Steve Block6ded16b2010-05-10 14:33:55 +01007202
7203 // Make sure that the expected number of instructions are generated.
Leon Clarkef7060e22010-06-03 12:02:55 +01007204 ASSERT_EQ(kInlinedKeyedStoreInstructionsAfterPatch,
Steve Block6ded16b2010-05-10 14:33:55 +01007205 masm_->InstructionsGeneratedSince(&check_inlined_codesize));
7206 }
7207
Steve Block8defd9f2010-07-08 12:39:36 +01007208 ASSERT(we_remembered_the_write_barrier);
7209
Steve Block6ded16b2010-05-10 14:33:55 +01007210 deferred->BindExit();
7211 } else {
7212 frame()->CallKeyedStoreIC();
7213 }
Leon Clarked91b9f72010-01-27 17:25:45 +00007214}
7215
7216
#ifdef DEBUG
// Debug-only invariant hook; this ARM port performs no entry-register
// bookkeeping, so the check trivially succeeds.
bool CodeGenerator::HasValidEntryRegisters() { return true; }
#endif
7220
7221
7222#undef __
7223#define __ ACCESS_MASM(masm)
7224
Steve Blocka7e24c12009-10-30 11:49:00 +00007225Handle<String> Reference::GetName() {
7226 ASSERT(type_ == NAMED);
7227 Property* property = expression_->AsProperty();
7228 if (property == NULL) {
7229 // Global variable reference treated as a named property reference.
7230 VariableProxy* proxy = expression_->AsVariableProxy();
7231 ASSERT(proxy->AsVariable() != NULL);
7232 ASSERT(proxy->AsVariable()->is_global());
7233 return proxy->name();
7234 } else {
7235 Literal* raw_name = property->key()->AsLiteral();
7236 ASSERT(raw_name != NULL);
7237 return Handle<String>(String::cast(*raw_name->handle()));
7238 }
7239}
7240
7241
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01007242void Reference::DupIfPersist() {
7243 if (persist_after_get_) {
7244 switch (type_) {
7245 case KEYED:
7246 cgen_->frame()->Dup2();
7247 break;
7248 case NAMED:
7249 cgen_->frame()->Dup();
7250 // Fall through.
7251 case UNLOADED:
7252 case ILLEGAL:
7253 case SLOT:
7254 // Do nothing.
7255 ;
7256 }
7257 } else {
7258 set_unloaded();
7259 }
7260}
7261
7262
// Emits code to load the value of this reference, leaving the result on
// top of the virtual frame (pushed from r0 in the keyed case).  The
// reference's frame slots are duplicated first when it must survive the
// load (see DupIfPersist); otherwise the reference becomes unloaded.
void Reference::GetValue() {
  ASSERT(cgen_->HasValidEntryRegisters());
  ASSERT(!is_illegal());
  ASSERT(!cgen_->has_cc());
  MacroAssembler* masm = cgen_->masm();
  Property* property = expression_->AsProperty();
  if (property != NULL) {
    // Attribute emitted code to the property's source position.
    cgen_->CodeForSourcePosition(property->position());
  }

  switch (type_) {
    case SLOT: {
      Comment cmnt(masm, "[ Load from Slot");
      Slot* slot = expression_->AsVariableProxy()->AsVariable()->AsSlot();
      ASSERT(slot != NULL);
      DupIfPersist();
      cgen_->LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
      break;
    }

    case NAMED: {
      // A variable proxy here means a global variable access, compiled as
      // a named load from the global object.
      Variable* var = expression_->AsVariableProxy()->AsVariable();
      bool is_global = var != NULL;
      ASSERT(!is_global || var->is_global());
      Handle<String> name = GetName();
      DupIfPersist();
      cgen_->EmitNamedLoad(name, is_global);
      break;
    }

    case KEYED: {
      ASSERT(property != NULL);
      DupIfPersist();
      cgen_->EmitKeyedLoad();
      // EmitKeyedLoad leaves its result in r0.
      cgen_->frame()->EmitPush(r0);
      break;
    }

    default:
      UNREACHABLE();
  }
}
7305
7306
Steve Block8defd9f2010-07-08 12:39:36 +01007307void Reference::SetValue(InitState init_state, WriteBarrierCharacter wb_info) {
Steve Blocka7e24c12009-10-30 11:49:00 +00007308 ASSERT(!is_illegal());
7309 ASSERT(!cgen_->has_cc());
7310 MacroAssembler* masm = cgen_->masm();
7311 VirtualFrame* frame = cgen_->frame();
7312 Property* property = expression_->AsProperty();
7313 if (property != NULL) {
7314 cgen_->CodeForSourcePosition(property->position());
7315 }
7316
7317 switch (type_) {
7318 case SLOT: {
7319 Comment cmnt(masm, "[ Store to Slot");
Kristian Monsen0d5e1162010-09-30 15:31:59 +01007320 Slot* slot = expression_->AsVariableProxy()->AsVariable()->AsSlot();
Leon Clarkee46be812010-01-19 14:06:41 +00007321 cgen_->StoreToSlot(slot, init_state);
Steve Block6ded16b2010-05-10 14:33:55 +01007322 set_unloaded();
Steve Blocka7e24c12009-10-30 11:49:00 +00007323 break;
7324 }
7325
7326 case NAMED: {
7327 Comment cmnt(masm, "[ Store to named Property");
Steve Block6ded16b2010-05-10 14:33:55 +01007328 cgen_->EmitNamedStore(GetName(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00007329 frame->EmitPush(r0);
Andrei Popescu402d9372010-02-26 13:31:12 +00007330 set_unloaded();
Steve Blocka7e24c12009-10-30 11:49:00 +00007331 break;
7332 }
7333
7334 case KEYED: {
7335 Comment cmnt(masm, "[ Store to keyed Property");
7336 Property* property = expression_->AsProperty();
7337 ASSERT(property != NULL);
7338 cgen_->CodeForSourcePosition(property->position());
Steve Block8defd9f2010-07-08 12:39:36 +01007339 cgen_->EmitKeyedStore(property->key()->type(), wb_info);
Steve Blocka7e24c12009-10-30 11:49:00 +00007340 frame->EmitPush(r0);
Leon Clarkef7060e22010-06-03 12:02:55 +01007341 set_unloaded();
Steve Blocka7e24c12009-10-30 11:49:00 +00007342 break;
7343 }
7344
7345 default:
7346 UNREACHABLE();
7347 }
7348}
7349
7350
Leon Clarkee46be812010-01-19 14:06:41 +00007351const char* GenericBinaryOpStub::GetName() {
7352 if (name_ != NULL) return name_;
7353 const int len = 100;
7354 name_ = Bootstrapper::AllocateAutoDeletedArray(len);
7355 if (name_ == NULL) return "OOM";
7356 const char* op_name = Token::Name(op_);
7357 const char* overwrite_name;
7358 switch (mode_) {
7359 case NO_OVERWRITE: overwrite_name = "Alloc"; break;
7360 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
7361 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
7362 default: overwrite_name = "UnknownOverwrite"; break;
7363 }
7364
7365 OS::SNPrintF(Vector<char>(name_, len),
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01007366 "GenericBinaryOpStub_%s_%s%s_%s",
Leon Clarkee46be812010-01-19 14:06:41 +00007367 op_name,
7368 overwrite_name,
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01007369 specialized_on_rhs_ ? "_ConstantRhs" : "",
7370 BinaryOpIC::GetName(runtime_operands_type_));
Leon Clarkee46be812010-01-19 14:06:41 +00007371 return name_;
7372}
7373
7374
Steve Blocka7e24c12009-10-30 11:49:00 +00007375#undef __
7376
7377} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01007378
7379#endif // V8_TARGET_ARCH_ARM