// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_ARM)

#include "bootstrapper.h"
#include "code-stubs.h"
#include "codegen-inl.h"
#include "compiler.h"
#include "debug.h"
#include "ic-inl.h"
#include "jsregexp.h"
#include "jump-target-light-inl.h"
#include "parser.h"
#include "regexp-macro-assembler.h"
#include "regexp-stack.h"
#include "register-allocator-inl.h"
#include "runtime.h"
#include "scopes.h"
#include "virtual-frame-inl.h"
#include "virtual-frame-arm-inl.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm_)

// -------------------------------------------------------------------------
// Platform-specific DeferredCode functions.

void DeferredCode::SaveRegisters() {
  // On ARM you either have a completely spilled frame or you
  // handle it yourself, but at the moment there's no automation
  // of registers and deferred code.
}


void DeferredCode::RestoreRegisters() {
}


// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.

void VirtualFrameRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  frame_state_->frame()->AssertIsSpilled();
}


void VirtualFrameRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
}


void ICRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterInternalFrame();
}


void ICRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveInternalFrame();
}


// -------------------------------------------------------------------------
// CodeGenState implementation.

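// Code generation states are managed RAII-style: each constructor below
// installs the new state on the owning code generator (saving the previous
// state), and the CodeGenState destructor restores the previous state.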
CodeGenState::CodeGenState(CodeGenerator* owner)
    : owner_(owner),
      previous_(owner->state()) {
  owner->set_state(this);
}


ConditionCodeGenState::ConditionCodeGenState(CodeGenerator* owner,
                                             JumpTarget* true_target,
                                             JumpTarget* false_target)
    : CodeGenState(owner),
      true_target_(true_target),
      false_target_(false_target) {
  owner->set_state(this);
}


TypeInfoCodeGenState::TypeInfoCodeGenState(CodeGenerator* owner,
                                           Slot* slot,
                                           TypeInfo type_info)
    : CodeGenState(owner),
      slot_(slot) {
  owner->set_state(this);
  old_type_info_ = owner->set_type_info(slot, type_info);
}


CodeGenState::~CodeGenState() {
  ASSERT(owner_->state() == this);
  owner_->set_state(previous_);
}


TypeInfoCodeGenState::~TypeInfoCodeGenState() {
  owner()->set_type_info(slot_, old_type_info_);
}

// -------------------------------------------------------------------------
// CodeGenerator implementation

int CodeGenerator::inlined_write_barrier_size_ = -1;

CodeGenerator::CodeGenerator(MacroAssembler* masm)
    : deferred_(8),
      masm_(masm),
      info_(NULL),
      frame_(NULL),
      allocator_(NULL),
      cc_reg_(al),
      state_(NULL),
      loop_nesting_(0),
      type_info_(NULL),
      function_return_(JumpTarget::BIDIRECTIONAL),
      function_return_is_shadowed_(false) {
}


// Calling conventions:
// fp: caller's frame pointer
// sp: stack pointer
// r1: called JS function
// cp: callee's context

void CodeGenerator::Generate(CompilationInfo* info) {
  // Record the position for debugging purposes.
  CodeForFunctionPosition(info->function());
  Comment cmnt(masm_, "[ function compiled by virtual frame code generator");

  // Initialize state.
  info_ = info;

  int slots = scope()->num_parameters() + scope()->num_stack_slots();
  ScopedVector<TypeInfo> type_info_array(slots);
  type_info_ = &type_info_array;

  ASSERT(allocator_ == NULL);
  RegisterAllocator register_allocator(this);
  allocator_ = &register_allocator;
  ASSERT(frame_ == NULL);
  frame_ = new VirtualFrame();
  cc_reg_ = al;

  // Adjust for function-level loop nesting.
  ASSERT_EQ(0, loop_nesting_);
  loop_nesting_ = info->loop_nesting();

  {
    CodeGenState state(this);

    // Entry:
    // Stack: receiver, arguments
    // lr: return address
    // fp: caller's frame pointer
    // sp: stack pointer
    // r1: called JS function
    // cp: callee's context
    allocator_->Initialize();

#ifdef DEBUG
    if (strlen(FLAG_stop_at) > 0 &&
        info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
      frame_->SpillAll();
      __ stop("stop-at");
    }
#endif

    frame_->Enter();
    // tos: code slot

    // Allocate space for locals and initialize them.  This also checks
    // for stack overflow.
    frame_->AllocateStackSlots();

    frame_->AssertIsSpilled();
    int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (heap_slots > 0) {
      // Allocate local context.
      // Get outer context and create a new context based on it.
      __ ldr(r0, frame_->Function());
      frame_->EmitPush(r0);
      if (heap_slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(heap_slots);
        frame_->CallStub(&stub, 1);
      } else {
        frame_->CallRuntime(Runtime::kNewContext, 1);
      }

#ifdef DEBUG
      JumpTarget verified_true;
      __ cmp(r0, cp);
      verified_true.Branch(eq);
      __ stop("NewContext: r0 is expected to be the same as cp");
      verified_true.Bind();
#endif
      // Update context local.
      __ str(cp, frame_->Context());
    }

    // TODO(1241774): Improve this code:
    // 1) only needed if we have a context
    // 2) no need to recompute context ptr every single time
    // 3) don't copy parameter operand code from SlotOperand!
    {
      Comment cmnt2(masm_, "[ copy context parameters into .context");
      // Note that iteration order is relevant here! If we have the same
      // parameter twice (e.g., function (x, y, x)), and that parameter
      // needs to be copied into the context, it must be the last argument
      // passed to the parameter that needs to be copied. This is a rare
      // case so we don't check for it, instead we rely on the copying
      // order: such a parameter is copied repeatedly into the same
      // context location and thus the last value is what is seen inside
      // the function.
      frame_->AssertIsSpilled();
      for (int i = 0; i < scope()->num_parameters(); i++) {
        Variable* par = scope()->parameter(i);
        Slot* slot = par->slot();
        if (slot != NULL && slot->type() == Slot::CONTEXT) {
          ASSERT(!scope()->is_global_scope());  // No params in global scope.
          __ ldr(r1, frame_->ParameterAt(i));
          // Loads r2 with context; used below in RecordWrite.
          __ str(r1, SlotOperand(slot, r2));
          // Load the offset into r3.
          int slot_offset =
              FixedArray::kHeaderSize + slot->index() * kPointerSize;
          __ RecordWrite(r2, Operand(slot_offset), r3, r1);
        }
      }
    }

    // Store the arguments object.  This must happen after context
    // initialization because the arguments object may be stored in
    // the context.
    if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
      StoreArgumentsObject(true);
    }

    // Initialize ThisFunction reference if present.
    if (scope()->is_function_scope() && scope()->function() != NULL) {
      frame_->EmitPushRoot(Heap::kTheHoleValueRootIndex);
      StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT);
    }

    // Initialize the function return target after the locals are set
    // up, because it needs the expected frame height from the frame.
    function_return_.SetExpectedHeight();
    function_return_is_shadowed_ = false;

    // Generate code to 'execute' declarations and initialize functions
    // (source elements). In case of an illegal redeclaration we need to
    // handle that instead of processing the declarations.
    if (scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ illegal redeclarations");
      scope()->VisitIllegalRedeclaration(this);
    } else {
      Comment cmnt(masm_, "[ declarations");
      ProcessDeclarations(scope()->declarations());
      // Bail out if a stack-overflow exception occurred when processing
      // declarations.
      if (HasStackOverflow()) return;
    }

    if (FLAG_trace) {
      frame_->CallRuntime(Runtime::kTraceEnter, 0);
      // Ignore the return value.
    }

    // Compile the body of the function in a vanilla state. Don't
    // bother compiling all the code if the scope has an illegal
    // redeclaration.
    if (!scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ function body");
#ifdef DEBUG
      bool is_builtin = Bootstrapper::IsActive();
      bool should_trace =
          is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
      if (should_trace) {
        frame_->CallRuntime(Runtime::kDebugTrace, 0);
        // Ignore the return value.
      }
#endif
      VisitStatements(info->function()->body());
    }
  }

  // Handle the return from the function.
  if (has_valid_frame()) {
    // If there is a valid frame, control flow can fall off the end of
    // the body.  In that case there is an implicit return statement.
    ASSERT(!function_return_is_shadowed_);
    frame_->PrepareForReturn();
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    if (function_return_.is_bound()) {
      function_return_.Jump();
    } else {
      function_return_.Bind();
      GenerateReturnSequence();
    }
  } else if (function_return_.is_linked()) {
    // If the return target has dangling jumps to it, then we have not
    // yet generated the return sequence.  This can happen when (a)
    // control does not flow off the end of the body so we did not
    // compile an artificial return statement just above, and (b) there
    // are return statements in the body but (c) they are all shadowed.
    function_return_.Bind();
    GenerateReturnSequence();
  }

  // Adjust for function-level loop nesting.
  ASSERT(loop_nesting_ == info->loop_nesting());
  loop_nesting_ = 0;

  // Code generation state must be reset.
  ASSERT(!has_cc());
  ASSERT(state_ == NULL);
  ASSERT(loop_nesting() == 0);
  ASSERT(!function_return_is_shadowed_);
  function_return_.Unuse();
  DeleteFrame();

  // Process any deferred code using the register allocator.
  if (!HasStackOverflow()) {
    ProcessDeferred();
  }

  allocator_ = NULL;
  type_info_ = NULL;
}


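// Returns the slot number of a parameter or stack-allocated local (locals
// are numbered after the parameters), or kInvalidSlotNumber for any other
// kind of slot.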
int CodeGenerator::NumberOfSlot(Slot* slot) {
  if (slot == NULL) return kInvalidSlotNumber;
  switch (slot->type()) {
    case Slot::PARAMETER:
      return slot->index();
    case Slot::LOCAL:
      return slot->index() + scope()->num_parameters();
    default:
      break;
  }
  return kInvalidSlotNumber;
}


MemOperand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
  // Currently, this assertion will fail if we try to assign to
  // a constant variable that is constant because it is read-only
  // (such as the variable referring to a named function expression).
  // We need to implement assignments to read-only variables.
  // Ideally, we should do this during AST generation (by converting
  // such assignments into expression statements); however, in general
  // we may not be able to make the decision until past AST generation,
  // that is when the entire program is known.
  ASSERT(slot != NULL);
  int index = slot->index();
  switch (slot->type()) {
    case Slot::PARAMETER:
      return frame_->ParameterAt(index);

    case Slot::LOCAL:
      return frame_->LocalAt(index);

    case Slot::CONTEXT: {
      // Follow the context chain if necessary.
      ASSERT(!tmp.is(cp));  // do not overwrite context register
      Register context = cp;
      int chain_length = scope()->ContextChainLength(slot->var()->scope());
      for (int i = 0; i < chain_length; i++) {
        // Load the closure.
        // (All contexts, even 'with' contexts, have a closure,
        // and it is the same for all contexts inside a function.
        // There is no need to go to the function context first.)
        __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
        // Load the function context (which is the incoming, outer context).
        __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
        context = tmp;
      }
      // We may have a 'with' context now. Get the function context.
      // (In fact this mov may never be needed, since the scope analysis
      // may not permit a direct context access in this case and thus we are
      // always at a function context. However it is safe to dereference it
      // because the function context of a function context is itself. Before
      // deleting this mov we should try to create a counter-example first,
      // though...)
      __ ldr(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
      return ContextOperand(tmp, index);
    }

    default:
      UNREACHABLE();
      return MemOperand(r0, 0);
  }
}


429 Slot* slot,
430 Register tmp,
431 Register tmp2,
432 JumpTarget* slow) {
433 ASSERT(slot->type() == Slot::CONTEXT);
434 Register context = cp;
435
436 for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
437 if (s->num_heap_slots() > 0) {
438 if (s->calls_eval()) {
439 // Check that extension is NULL.
440 __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX));
441 __ tst(tmp2, tmp2);
442 slow->Branch(ne);
443 }
444 __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
445 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
446 context = tmp;
447 }
448 }
449 // Check that last extension is NULL.
450 __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX));
451 __ tst(tmp2, tmp2);
452 slow->Branch(ne);
453 __ ldr(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
454 return ContextOperand(tmp, slot->index());
455}
456
457
// Loads a value on TOS. If it is a boolean value, the result may have been
// (partially) translated into branches, or it may have set the condition
// code register. If force_cc is set, the value is forced to set the
// condition code register and no value is pushed. If the condition code
// register was set, has_cc() is true and cc_reg_ contains the condition to
// test for 'true'.
void CodeGenerator::LoadCondition(Expression* x,
                                  JumpTarget* true_target,
                                  JumpTarget* false_target,
                                  bool force_cc) {
  ASSERT(!has_cc());
  int original_height = frame_->height();

  { ConditionCodeGenState new_state(this, true_target, false_target);
    Visit(x);

    // If we hit a stack overflow, we may not have actually visited
    // the expression. In that case, we ensure that we have a
    // valid-looking frame state because we will continue to generate
    // code as we unwind the C++ stack.
    //
    // It's possible to have both a stack overflow and a valid frame
    // state (e.g., a subexpression overflowed, visiting it returned
    // with a dummied frame state, and visiting this expression
    // returned with a normal-looking state).
    if (HasStackOverflow() &&
        has_valid_frame() &&
        !has_cc() &&
        frame_->height() == original_height) {
      true_target->Jump();
    }
  }
  if (force_cc && frame_ != NULL && !has_cc()) {
    // Convert the TOS value to a boolean in the condition code register.
    ToBoolean(true_target, false_target);
  }
  ASSERT(!force_cc || !has_valid_frame() || has_cc());
  ASSERT(!has_valid_frame() ||
         (has_cc() && frame_->height() == original_height) ||
         (!has_cc() && frame_->height() == original_height + 1));
}


void CodeGenerator::Load(Expression* expr) {
  // We generally assume that we are not in a spilled scope for most
  // of the code generator.  A failure to ensure this caused issue 815
  // and this assert is designed to catch similar issues.
  frame_->AssertIsNotSpilled();
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  JumpTarget true_target;
  JumpTarget false_target;
  LoadCondition(expr, &true_target, &false_target, false);

  if (has_cc()) {
    // Convert cc_reg_ into a boolean value.
    JumpTarget loaded;
    JumpTarget materialize_true;
    materialize_true.Branch(cc_reg_);
    frame_->EmitPushRoot(Heap::kFalseValueRootIndex);
    loaded.Jump();
    materialize_true.Bind();
    frame_->EmitPushRoot(Heap::kTrueValueRootIndex);
    loaded.Bind();
    cc_reg_ = al;
  }

  if (true_target.is_linked() || false_target.is_linked()) {
    // We have at least one condition value that has been "translated"
    // into a branch, thus it needs to be loaded explicitly.
    JumpTarget loaded;
    if (frame_ != NULL) {
      loaded.Jump();  // Don't lose the current TOS.
    }
    bool both = true_target.is_linked() && false_target.is_linked();
    // Load "true" if necessary.
    if (true_target.is_linked()) {
      true_target.Bind();
      frame_->EmitPushRoot(Heap::kTrueValueRootIndex);
    }
    // If both "true" and "false" need to be loaded jump across the code for
    // "false".
    if (both) {
      loaded.Jump();
    }
    // Load "false" if necessary.
    if (false_target.is_linked()) {
      false_target.Bind();
      frame_->EmitPushRoot(Heap::kFalseValueRootIndex);
    }
    // A value is loaded on all paths reaching this point.
    loaded.Bind();
  }
  ASSERT(has_valid_frame());
  ASSERT(!has_cc());
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::LoadGlobal() {
  Register reg = frame_->GetTOSRegister();
  __ ldr(reg, GlobalObject());
  frame_->EmitPush(reg);
}


void CodeGenerator::LoadGlobalReceiver(Register scratch) {
  Register reg = frame_->GetTOSRegister();
  __ ldr(reg, ContextOperand(cp, Context::GLOBAL_INDEX));
  __ ldr(reg,
         FieldMemOperand(reg, GlobalObject::kGlobalReceiverOffset));
  frame_->EmitPush(reg);
}


ArgumentsAllocationMode CodeGenerator::ArgumentsMode() {
  if (scope()->arguments() == NULL) return NO_ARGUMENTS_ALLOCATION;
  ASSERT(scope()->arguments_shadow() != NULL);
  // We don't want to do lazy arguments allocation for functions that
  // have heap-allocated contexts, because it interferes with the
  // uninitialized const tracking in the context objects.
  return (scope()->num_heap_slots() > 0)
      ? EAGER_ARGUMENTS_ALLOCATION
      : LAZY_ARGUMENTS_ALLOCATION;
}


void CodeGenerator::StoreArgumentsObject(bool initial) {
  ArgumentsAllocationMode mode = ArgumentsMode();
  ASSERT(mode != NO_ARGUMENTS_ALLOCATION);

  Comment cmnt(masm_, "[ store arguments object");
  if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) {
    // When using lazy arguments allocation, we store the hole value
    // as a sentinel indicating that the arguments object hasn't been
    // allocated yet.
    frame_->EmitPushRoot(Heap::kTheHoleValueRootIndex);
  } else {
    frame_->SpillAll();
    ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
    __ ldr(r2, frame_->Function());
    // The receiver is below the arguments, the return address, and the
    // frame pointer on the stack.
    const int kReceiverDisplacement = 2 + scope()->num_parameters();
    __ add(r1, fp, Operand(kReceiverDisplacement * kPointerSize));
    __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));
    frame_->Adjust(3);
    __ Push(r2, r1, r0);
    frame_->CallStub(&stub, 3);
    frame_->EmitPush(r0);
  }

  Variable* arguments = scope()->arguments()->var();
  Variable* shadow = scope()->arguments_shadow()->var();
  ASSERT(arguments != NULL && arguments->slot() != NULL);
  ASSERT(shadow != NULL && shadow->slot() != NULL);
  JumpTarget done;
  if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) {
    // We have to skip storing into the arguments slot if it has
    // already been written to.  This can happen if a function
    // has a local variable named 'arguments'.
    LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF);
    Register arguments = frame_->PopToRegister();
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(arguments, ip);
    done.Branch(ne);
  }
  StoreToSlot(arguments->slot(), NOT_CONST_INIT);
  if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind();
  StoreToSlot(shadow->slot(), NOT_CONST_INIT);
}


void CodeGenerator::LoadTypeofExpression(Expression* expr) {
  // Special handling of identifiers as subexpressions of typeof.
  Variable* variable = expr->AsVariableProxy()->AsVariable();
  if (variable != NULL && !variable->is_this() && variable->is_global()) {
    // For a global variable we build the property reference
    // <global>.<variable> and perform a (regular non-contextual) property
    // load to make sure we do not get reference errors.
    Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX);
    Literal key(variable->name());
    Property property(&global, &key, RelocInfo::kNoPosition);
    Reference ref(this, &property);
    ref.GetValue();
  } else if (variable != NULL && variable->slot() != NULL) {
    // For a variable that rewrites to a slot, we signal it is the immediate
    // subexpression of a typeof.
    LoadFromSlotCheckForArguments(variable->slot(), INSIDE_TYPEOF);
  } else {
    // Anything else can be handled normally.
    Load(expr);
  }
}


Reference::Reference(CodeGenerator* cgen,
                     Expression* expression,
                     bool persist_after_get)
    : cgen_(cgen),
      expression_(expression),
      type_(ILLEGAL),
      persist_after_get_(persist_after_get) {
  // We generally assume that we are not in a spilled scope for most
  // of the code generator.  A failure to ensure this caused issue 815
  // and this assert is designed to catch similar issues.
  cgen->frame()->AssertIsNotSpilled();
  cgen->LoadReference(this);
}


Reference::~Reference() {
  ASSERT(is_unloaded() || is_illegal());
}


void CodeGenerator::LoadReference(Reference* ref) {
  Comment cmnt(masm_, "[ LoadReference");
  Expression* e = ref->expression();
  Property* property = e->AsProperty();
  Variable* var = e->AsVariableProxy()->AsVariable();

  if (property != NULL) {
    // The expression is either a property or a variable proxy that rewrites
    // to a property.
    Load(property->obj());
    if (property->key()->IsPropertyName()) {
      ref->set_type(Reference::NAMED);
    } else {
      Load(property->key());
      ref->set_type(Reference::KEYED);
    }
  } else if (var != NULL) {
    // The expression is a variable proxy that does not rewrite to a
    // property.  Global variables are treated as named property references.
    if (var->is_global()) {
      LoadGlobal();
      ref->set_type(Reference::NAMED);
    } else {
      ASSERT(var->slot() != NULL);
      ref->set_type(Reference::SLOT);
    }
  } else {
    // Anything else is a runtime error.
    Load(e);
    frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
  }
}


void CodeGenerator::UnloadReference(Reference* ref) {
  int size = ref->size();
  ref->set_unloaded();
  if (size == 0) return;

  // Pop a reference from the stack while preserving TOS.
  VirtualFrame::RegisterAllocationScope scope(this);
  Comment cmnt(masm_, "[ UnloadReference");
  if (size > 0) {
    Register tos = frame_->PopToRegister();
    frame_->Drop(size);
    frame_->EmitPush(tos);
  }
}


// ECMA-262, section 9.2, page 30: ToBoolean(). Convert the given
// register to a boolean in the condition code register. The code
// may jump to 'false_target' in case the register converts to 'false'.
void CodeGenerator::ToBoolean(JumpTarget* true_target,
                              JumpTarget* false_target) {
  // Note: The generated code snippet does not change stack variables.
  //       Only the condition code should be set.
  bool known_smi = frame_->KnownSmiAt(0);
  Register tos = frame_->PopToRegister();

  // Fast case checks

  // Check if the value is 'false'.
  if (!known_smi) {
    __ LoadRoot(ip, Heap::kFalseValueRootIndex);
    __ cmp(tos, ip);
    false_target->Branch(eq);

    // Check if the value is 'true'.
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(tos, ip);
    true_target->Branch(eq);

    // Check if the value is 'undefined'.
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(tos, ip);
    false_target->Branch(eq);
  }

  // Check if the value is a smi.
  __ cmp(tos, Operand(Smi::FromInt(0)));
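  // (A smi is false only if it is the smi encoding of 0.  When the value is
  // statically known to be a smi, the ne condition set here is already the
  // final answer.)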

  if (!known_smi) {
    false_target->Branch(eq);
    __ tst(tos, Operand(kSmiTagMask));
    true_target->Branch(eq);

    // Slow case.
    if (CpuFeatures::IsSupported(VFP3)) {
      CpuFeatures::Scope scope(VFP3);
      // Implements the slow case by using ToBooleanStub.
      // The ToBooleanStub takes a single argument, and
      // returns a non-zero value for true, or zero for false.
      // Both the argument value and the return value use the
      // register assigned to tos_.
      ToBooleanStub stub(tos);
      frame_->CallStub(&stub, 0);
      // Convert the result in "tos" to a condition code.
      __ cmp(tos, Operand(0, RelocInfo::NONE));
    } else {
      // Implements slow case by calling the runtime.
      frame_->EmitPush(tos);
      frame_->CallRuntime(Runtime::kToBool, 1);
      // Convert the result (r0) to a condition code.
      __ LoadRoot(ip, Heap::kFalseValueRootIndex);
      __ cmp(r0, ip);
    }
  }

  cc_reg_ = ne;
}


void CodeGenerator::GenericBinaryOperation(Token::Value op,
                                           OverwriteMode overwrite_mode,
                                           GenerateInlineSmi inline_smi,
                                           int constant_rhs) {
  // top of virtual frame: y
  // 2nd elt. on virtual frame : x
  // result : top of virtual frame

  // Stub is entered with a call: 'return address' is in lr.
  switch (op) {
    case Token::ADD:
    case Token::SUB:
      if (inline_smi) {
        JumpTarget done;
        Register rhs = frame_->PopToRegister();
        Register lhs = frame_->PopToRegister(rhs);
        Register scratch = VirtualFrame::scratch0();
        __ orr(scratch, rhs, Operand(lhs));
        // Check they are both small and positive.
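        // (The test passes only if both operands are smis whose top two bits
        // are clear, so the optimistic add/sub below cannot overflow out of
        // the smi range.)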
        __ tst(scratch, Operand(kSmiTagMask | 0xc0000000));
        ASSERT(rhs.is(r0) || lhs.is(r0));  // r0 is free now.
        STATIC_ASSERT(kSmiTag == 0);
        if (op == Token::ADD) {
          __ add(r0, lhs, Operand(rhs), LeaveCC, eq);
        } else {
          __ sub(r0, lhs, Operand(rhs), LeaveCC, eq);
        }
        done.Branch(eq);
        GenericBinaryOpStub stub(op, overwrite_mode, lhs, rhs, constant_rhs);
        frame_->SpillAll();
        frame_->CallStub(&stub, 0);
        done.Bind();
        frame_->EmitPush(r0);
        break;
      } else {
        // Fall through!
      }
    case Token::BIT_OR:
    case Token::BIT_AND:
    case Token::BIT_XOR:
      if (inline_smi) {
        bool rhs_is_smi = frame_->KnownSmiAt(0);
        bool lhs_is_smi = frame_->KnownSmiAt(1);
        Register rhs = frame_->PopToRegister();
        Register lhs = frame_->PopToRegister(rhs);
        Register smi_test_reg;
        Condition cond;
        if (!rhs_is_smi || !lhs_is_smi) {
          if (rhs_is_smi) {
            smi_test_reg = lhs;
          } else if (lhs_is_smi) {
            smi_test_reg = rhs;
          } else {
            smi_test_reg = VirtualFrame::scratch0();
            __ orr(smi_test_reg, rhs, Operand(lhs));
          }
          // Check they are both Smis.
          __ tst(smi_test_reg, Operand(kSmiTagMask));
          cond = eq;
        } else {
          cond = al;
        }
        ASSERT(rhs.is(r0) || lhs.is(r0));  // r0 is free now.
        if (op == Token::BIT_OR) {
          __ orr(r0, lhs, Operand(rhs), LeaveCC, cond);
        } else if (op == Token::BIT_AND) {
          __ and_(r0, lhs, Operand(rhs), LeaveCC, cond);
        } else {
          ASSERT(op == Token::BIT_XOR);
          STATIC_ASSERT(kSmiTag == 0);
          __ eor(r0, lhs, Operand(rhs), LeaveCC, cond);
        }
        if (cond != al) {
          JumpTarget done;
          done.Branch(cond);
          GenericBinaryOpStub stub(op, overwrite_mode, lhs, rhs, constant_rhs);
          frame_->SpillAll();
          frame_->CallStub(&stub, 0);
          done.Bind();
        }
        frame_->EmitPush(r0);
        break;
      } else {
        // Fall through!
      }
    case Token::MUL:
    case Token::DIV:
    case Token::MOD:
    case Token::SHL:
    case Token::SHR:
    case Token::SAR: {
      Register rhs = frame_->PopToRegister();
      Register lhs = frame_->PopToRegister(rhs);  // Don't pop to rhs register.
      GenericBinaryOpStub stub(op, overwrite_mode, lhs, rhs, constant_rhs);
      frame_->SpillAll();
      frame_->CallStub(&stub, 0);
      frame_->EmitPush(r0);
      break;
    }

    case Token::COMMA: {
      Register scratch = frame_->PopToRegister();
      // Simply discard left value.
      frame_->Drop();
      frame_->EmitPush(scratch);
      break;
    }

    default:
      // Other cases should have been handled before this point.
      UNREACHABLE();
      break;
  }
}


class DeferredInlineSmiOperation: public DeferredCode {
 public:
  DeferredInlineSmiOperation(Token::Value op,
                             int value,
                             bool reversed,
                             OverwriteMode overwrite_mode,
                             Register tos)
      : op_(op),
        value_(value),
        reversed_(reversed),
        overwrite_mode_(overwrite_mode),
        tos_register_(tos) {
    set_comment("[ DeferredInlinedSmiOperation");
  }

  virtual void Generate();
  // This stub makes explicit calls to SaveRegisters(), RestoreRegisters() and
  // Exit().  Currently on ARM SaveRegisters() and RestoreRegisters() are empty
  // methods, so it is the responsibility of the deferred code to save and
  // restore registers.
  virtual bool AutoSaveAndRestore() { return false; }

  void JumpToNonSmiInput(Condition cond);
  void JumpToAnswerOutOfRange(Condition cond);

 private:
  void GenerateNonSmiInput();
  void GenerateAnswerOutOfRange();
  void WriteNonSmiAnswer(Register answer,
                         Register heap_number,
                         Register scratch);

  Token::Value op_;
  int value_;
  bool reversed_;
  OverwriteMode overwrite_mode_;
  Register tos_register_;
  Label non_smi_input_;
  Label answer_out_of_range_;
};


// For bit operations we try harder and handle the case where the input is
// not a Smi but a 32-bit integer without calling the generic stub.
void DeferredInlineSmiOperation::JumpToNonSmiInput(Condition cond) {
  ASSERT(Token::IsBitOp(op_));

  __ b(cond, &non_smi_input_);
}


// For bit operations the result is always 32 bits, so we handle the case
// where the result does not fit in a Smi without calling the generic stub.
void DeferredInlineSmiOperation::JumpToAnswerOutOfRange(Condition cond) {
  ASSERT(Token::IsBitOp(op_));

  if ((op_ == Token::SHR) && !CpuFeatures::IsSupported(VFP3)) {
    // >>> requires an unsigned to double conversion and the non VFP code
    // does not support this conversion.
    __ b(cond, entry_label());
  } else {
    __ b(cond, &answer_out_of_range_);
  }
}


// On entry the non-constant side of the binary operation is in tos_register_
// and the constant smi side is nowhere. The tos_register_ is not used by the
// virtual frame. On exit the answer is in the tos_register_ and the virtual
// frame is unchanged.
void DeferredInlineSmiOperation::Generate() {
  VirtualFrame copied_frame(*frame_state()->frame());
  copied_frame.SpillAll();

  Register lhs = r1;
  Register rhs = r0;
  switch (op_) {
    case Token::ADD: {
      // Revert optimistic add.
      if (reversed_) {
        __ sub(r0, tos_register_, Operand(Smi::FromInt(value_)));
        __ mov(r1, Operand(Smi::FromInt(value_)));
      } else {
        __ sub(r1, tos_register_, Operand(Smi::FromInt(value_)));
        __ mov(r0, Operand(Smi::FromInt(value_)));
      }
      break;
    }

    case Token::SUB: {
      // Revert optimistic sub.
      if (reversed_) {
        __ rsb(r0, tos_register_, Operand(Smi::FromInt(value_)));
        __ mov(r1, Operand(Smi::FromInt(value_)));
      } else {
        __ add(r1, tos_register_, Operand(Smi::FromInt(value_)));
        __ mov(r0, Operand(Smi::FromInt(value_)));
      }
      break;
    }

    // For these operations there is no optimistic operation that needs to be
    // reverted.
    case Token::MUL:
    case Token::MOD:
    case Token::BIT_OR:
    case Token::BIT_XOR:
    case Token::BIT_AND:
    case Token::SHL:
    case Token::SHR:
    case Token::SAR: {
      if (tos_register_.is(r1)) {
        __ mov(r0, Operand(Smi::FromInt(value_)));
      } else {
        ASSERT(tos_register_.is(r0));
        __ mov(r1, Operand(Smi::FromInt(value_)));
      }
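      // The stub expects the constant operand in lhs exactly when the
      // operation is reversed, so swap the default register assignment
      // (lhs in r1, rhs in r0) when it would put the constant on the
      // wrong side.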
      if (reversed_ == tos_register_.is(r1)) {
        lhs = r0;
        rhs = r1;
      }
      break;
    }

    default:
      // Other cases should have been handled before this point.
      UNREACHABLE();
      break;
  }

  GenericBinaryOpStub stub(op_, overwrite_mode_, lhs, rhs, value_);
  __ CallStub(&stub);

  // The generic stub returns its value in r0, but that's not
  // necessarily what we want.  We want whatever the inlined code
  // expected, which is that the answer is in the same register as
  // the operand was.
  __ Move(tos_register_, r0);

  // The tos register was not in use for the virtual frame that we
  // came into this function with, so we can merge back to that frame
  // without trashing it.
  copied_frame.MergeTo(frame_state()->frame());

  Exit();

  if (non_smi_input_.is_linked()) {
    GenerateNonSmiInput();
  }

  if (answer_out_of_range_.is_linked()) {
    GenerateAnswerOutOfRange();
  }
}


// Convert and write the integer answer into heap_number.
void DeferredInlineSmiOperation::WriteNonSmiAnswer(Register answer,
                                                   Register heap_number,
                                                   Register scratch) {
  if (CpuFeatures::IsSupported(VFP3)) {
    CpuFeatures::Scope scope(VFP3);
    __ vmov(s0, answer);
    if (op_ == Token::SHR) {
      __ vcvt_f64_u32(d0, s0);
    } else {
      __ vcvt_f64_s32(d0, s0);
    }
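    // vstr needs an untagged base address, so strip the heap object tag
    // before storing at the value field's offset.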
    __ sub(scratch, heap_number, Operand(kHeapObjectTag));
    __ vstr(d0, scratch, HeapNumber::kValueOffset);
  } else {
    WriteInt32ToHeapNumberStub stub(answer, heap_number, scratch);
    __ CallStub(&stub);
  }
}


void DeferredInlineSmiOperation::GenerateNonSmiInput() {
  // We know the left hand side is not a Smi and the right hand side is an
  // immediate value (value_) which can be represented as a Smi. We only
  // handle bit operations.
  ASSERT(Token::IsBitOp(op_));

  if (FLAG_debug_code) {
    __ Abort("Should not fall through!");
  }

  __ bind(&non_smi_input_);
  if (FLAG_debug_code) {
    __ AbortIfSmi(tos_register_);
  }

  // This routine uses the registers from r2 to r6.  At the moment they are
  // not used by the register allocator, but when they are it should use
  // SpillAll and MergeTo like DeferredInlineSmiOperation::Generate() above.

  Register heap_number_map = r7;
  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  __ ldr(r3, FieldMemOperand(tos_register_, HeapNumber::kMapOffset));
  __ cmp(r3, heap_number_map);
  // Not a number, fall back to the GenericBinaryOpStub.
  __ b(ne, entry_label());

  Register int32 = r2;
  // Not a 32-bit signed int, fall back to the GenericBinaryOpStub.
  __ ConvertToInt32(tos_register_, int32, r4, r5, entry_label());

  // tos_register_ (r0 or r1): Original heap number.
  // int32: signed 32-bit int.

  Label result_not_a_smi;
  int shift_value = value_ & 0x1f;
  switch (op_) {
    case Token::BIT_OR:  __ orr(int32, int32, Operand(value_)); break;
    case Token::BIT_XOR: __ eor(int32, int32, Operand(value_)); break;
    case Token::BIT_AND: __ and_(int32, int32, Operand(value_)); break;
    case Token::SAR:
      ASSERT(!reversed_);
      if (shift_value != 0) {
        __ mov(int32, Operand(int32, ASR, shift_value));
      }
      break;
    case Token::SHR:
      ASSERT(!reversed_);
      if (shift_value != 0) {
        __ mov(int32, Operand(int32, LSR, shift_value), SetCC);
      } else {
        // SHR is special because it is required to produce a positive answer.
        __ cmp(int32, Operand(0, RelocInfo::NONE));
      }
      if (CpuFeatures::IsSupported(VFP3)) {
        __ b(mi, &result_not_a_smi);
      } else {
        // Non VFP code cannot convert from unsigned to double, so fall back
        // to GenericBinaryOpStub.
        __ b(mi, entry_label());
      }
      break;
    case Token::SHL:
      ASSERT(!reversed_);
      if (shift_value != 0) {
        __ mov(int32, Operand(int32, LSL, shift_value));
      }
      break;
    default: UNREACHABLE();
  }
  // Check that the *signed* result fits in a smi.  Not necessary for AND,
  // for SAR if the shift is more than 0, or for SHR if the shift is more
  // than 1.
  if (!( (op_ == Token::AND) ||
        ((op_ == Token::SAR) && (shift_value > 0)) ||
        ((op_ == Token::SHR) && (shift_value > 1)))) {
    __ add(r3, int32, Operand(0x40000000), SetCC);
    __ b(mi, &result_not_a_smi);
  }
  __ mov(tos_register_, Operand(int32, LSL, kSmiTagSize));
  Exit();

  if (result_not_a_smi.is_linked()) {
    __ bind(&result_not_a_smi);
    if (overwrite_mode_ != OVERWRITE_LEFT) {
      ASSERT((overwrite_mode_ == NO_OVERWRITE) ||
             (overwrite_mode_ == OVERWRITE_RIGHT));
      // If the allocation fails, fall back to the GenericBinaryOpStub.
      __ AllocateHeapNumber(r4, r5, r6, heap_number_map, entry_label());
      // Nothing can go wrong now, so overwrite tos.
      __ mov(tos_register_, Operand(r4));
    }

    // int32: answer as signed 32-bit integer.
    // tos_register_: Heap number to write the answer into.
    WriteNonSmiAnswer(int32, tos_register_, r3);

    Exit();
  }
}


void DeferredInlineSmiOperation::GenerateAnswerOutOfRange() {
  // The inputs to a bitwise operation were Smis but the result cannot fit
  // into a Smi, so we store it into a heap number.  tos_register_ holds the
  // result to be converted.
  ASSERT(Token::IsBitOp(op_));
  ASSERT(!reversed_);

  if (FLAG_debug_code) {
    __ Abort("Should not fall through!");
  }

  __ bind(&answer_out_of_range_);
  if (((value_ & 0x1f) == 0) && (op_ == Token::SHR)) {
    // >>> 0 is a special case where the result is already tagged but wrong
    // because the Smi is negative. We untag it.
    __ mov(tos_register_, Operand(tos_register_, ASR, kSmiTagSize));
  }

  // This routine uses the registers from r2 to r6.  At the moment they are
  // not used by the register allocator, but when they are it should use
  // SpillAll and MergeTo like DeferredInlineSmiOperation::Generate() above.

  // Allocate the result heap number.
  Register heap_number_map = r7;
  Register heap_number = r4;
  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  // If the allocation fails, fall back to the GenericBinaryOpStub.
  __ AllocateHeapNumber(heap_number, r5, r6, heap_number_map, entry_label());
  WriteNonSmiAnswer(tos_register_, heap_number, r3);
  __ mov(tos_register_, Operand(heap_number));

  Exit();
}


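// Returns true if x has at most two bits set.  (Each 'x &= x - 1' step
// clears the lowest set bit, so after one clearing step a value with at
// most two bits set has at most one bit left, making the final test zero.)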
static bool PopCountLessThanEqual2(unsigned int x) {
  x &= x - 1;
  return (x & (x - 1)) == 0;
}


// Returns the index of the lowest bit set.
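// Assumes x is non-zero; the loops below would not terminate for x == 0.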
static int BitPosition(unsigned x) {
  int bit_posn = 0;
  while ((x & 0xf) == 0) {
    bit_posn += 4;
    x >>= 4;
  }
  while ((x & 1) == 0) {
    bit_posn++;
    x >>= 1;
  }
  return bit_posn;
}


// Can we multiply by x with at most two shifts and an add?
// This answers yes to all integers from 2 to 10.
static bool IsEasyToMultiplyBy(int x) {
  if (x < 2) return false;  // Avoid special cases.
  if (x > (Smi::kMaxValue + 1) >> 2) return false;  // Almost always overflows.
  if (IsPowerOf2(x)) return true;  // Simple shift.
  if (PopCountLessThanEqual2(x)) return true;  // Shift and add and shift.
  if (IsPowerOf2(x + 1)) return true;  // Patterns like 11111.
  return false;
}


// Can multiply by anything that IsEasyToMultiplyBy returns true for.
// Source and destination may be the same register.  This routine does
// not set carry and overflow the way a mul instruction would.
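// For example, these are sketches of the sequences emitted for a few
// typical values (s = source, d = destination):
//   known_int ==  8  (power of two):   mov d, s, LSL #3
//   known_int == 10  (two bits set):   add d, s, s, LSL #2
//                                      mov d, d, LSL #1
//   known_int ==  7  (2^3 - 1):        rsb d, s, s, LSL #3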
static void InlineMultiplyByKnownInt(MacroAssembler* masm,
                                     Register source,
                                     Register destination,
                                     int known_int) {
  if (IsPowerOf2(known_int)) {
    masm->mov(destination, Operand(source, LSL, BitPosition(known_int)));
  } else if (PopCountLessThanEqual2(known_int)) {
    int first_bit = BitPosition(known_int);
    int second_bit = BitPosition(known_int ^ (1 << first_bit));
    masm->add(destination, source,
              Operand(source, LSL, second_bit - first_bit));
    if (first_bit != 0) {
      masm->mov(destination, Operand(destination, LSL, first_bit));
    }
  } else {
    ASSERT(IsPowerOf2(known_int + 1));  // Patterns like 1111.
    int the_bit = BitPosition(known_int + 1);
    masm->rsb(destination, source, Operand(source, LSL, the_bit));
  }
}


void CodeGenerator::SmiOperation(Token::Value op,
                                 Handle<Object> value,
                                 bool reversed,
                                 OverwriteMode mode) {
  int int_value = Smi::cast(*value)->value();

  bool both_sides_are_smi = frame_->KnownSmiAt(0);

  bool something_to_inline;
  switch (op) {
    case Token::ADD:
    case Token::SUB:
    case Token::BIT_AND:
    case Token::BIT_OR:
    case Token::BIT_XOR: {
      something_to_inline = true;
      break;
    }
    case Token::SHL: {
      something_to_inline = (both_sides_are_smi || !reversed);
      break;
    }
    case Token::SHR:
    case Token::SAR: {
      if (reversed) {
        something_to_inline = false;
      } else {
        something_to_inline = true;
      }
      break;
    }
    case Token::MOD: {
      if (reversed || int_value < 2 || !IsPowerOf2(int_value)) {
        something_to_inline = false;
      } else {
        something_to_inline = true;
      }
      break;
    }
    case Token::MUL: {
      if (!IsEasyToMultiplyBy(int_value)) {
        something_to_inline = false;
      } else {
        something_to_inline = true;
      }
      break;
    }
    default: {
      something_to_inline = false;
      break;
    }
  }

  if (!something_to_inline) {
    if (!reversed) {
      // Push the rhs onto the virtual frame by putting it in a TOS register.
      Register rhs = frame_->GetTOSRegister();
      __ mov(rhs, Operand(value));
      frame_->EmitPush(rhs, TypeInfo::Smi());
      GenericBinaryOperation(op, mode, GENERATE_INLINE_SMI, int_value);
    } else {
      // Pop the rhs, then push lhs and rhs in the right order.  Only performs
      // at most one pop, the rest takes place in TOS registers.
      Register lhs = frame_->GetTOSRegister();  // Get reg for pushing.
      Register rhs = frame_->PopToRegister(lhs);  // Don't use lhs for this.
      __ mov(lhs, Operand(value));
      frame_->EmitPush(lhs, TypeInfo::Smi());
      TypeInfo t = both_sides_are_smi ? TypeInfo::Smi() : TypeInfo::Unknown();
      frame_->EmitPush(rhs, t);
      GenericBinaryOperation(op, mode, GENERATE_INLINE_SMI,
                             GenericBinaryOpStub::kUnknownIntValue);
    }
    return;
  }

  // We move the top of stack to a register (normally no move is involved).
  Register tos = frame_->PopToRegister();
Steve Blocka7e24c12009-10-30 11:49:00 +00001351 switch (op) {
1352 case Token::ADD: {
1353 DeferredCode* deferred =
Steve Block6ded16b2010-05-10 14:33:55 +01001354 new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00001355
      __ add(tos, tos, Operand(value), SetCC);
      deferred->Branch(vs);
      if (!both_sides_are_smi) {
        __ tst(tos, Operand(kSmiTagMask));
        deferred->Branch(ne);
      }
      deferred->BindExit();
      frame_->EmitPush(tos);
      break;
    }

    case Token::SUB: {
      DeferredCode* deferred =
          new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);

      if (reversed) {
        __ rsb(tos, tos, Operand(value), SetCC);
      } else {
        __ sub(tos, tos, Operand(value), SetCC);
      }
      deferred->Branch(vs);
      if (!both_sides_are_smi) {
        __ tst(tos, Operand(kSmiTagMask));
        deferred->Branch(ne);
      }
      deferred->BindExit();
      frame_->EmitPush(tos);
      break;
    }

    case Token::BIT_OR:
    case Token::BIT_XOR:
    case Token::BIT_AND: {
      if (both_sides_are_smi) {
        switch (op) {
          case Token::BIT_OR:  __ orr(tos, tos, Operand(value)); break;
          case Token::BIT_XOR: __ eor(tos, tos, Operand(value)); break;
          case Token::BIT_AND: __ And(tos, tos, Operand(value)); break;
          default: UNREACHABLE();
        }
        frame_->EmitPush(tos, TypeInfo::Smi());
      } else {
        DeferredInlineSmiOperation* deferred =
            new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
        __ tst(tos, Operand(kSmiTagMask));
        deferred->JumpToNonSmiInput(ne);
        switch (op) {
          case Token::BIT_OR:  __ orr(tos, tos, Operand(value)); break;
          case Token::BIT_XOR: __ eor(tos, tos, Operand(value)); break;
          case Token::BIT_AND: __ And(tos, tos, Operand(value)); break;
          default: UNREACHABLE();
        }
        deferred->BindExit();
        TypeInfo result_type =
            (op == Token::BIT_AND) ? TypeInfo::Smi() : TypeInfo::Integer32();
        frame_->EmitPush(tos, result_type);
      }
      break;
    }

    case Token::SHL:
      if (reversed) {
        ASSERT(both_sides_are_smi);
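        // Find the largest shift count for which the constant lhs, shifted
        // left, still fits in a smi; dynamic shift amounts of at least that
        // size are handled in deferred code. For example, with int_value == 1
        // the loop below ends with max_shift == 29, since Smi::kMaxValue is
        // 2^30 - 1 on this 32-bit port.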
        int max_shift = 0;
        int max_result = int_value == 0 ? 1 : int_value;
        while (Smi::IsValid(max_result << 1)) {
          max_shift++;
          max_result <<= 1;
        }
        DeferredCode* deferred =
            new DeferredInlineSmiOperation(op, int_value, true, mode, tos);
        // Mask off the last 5 bits of the shift operand (rhs). This is part
        // of the definition of shift in JS and we know we have a Smi so we
        // can safely do this. The masked version gets passed to the
        // deferred code, but that makes no difference.
        __ and_(tos, tos, Operand(Smi::FromInt(0x1f)));
        __ cmp(tos, Operand(Smi::FromInt(max_shift)));
        deferred->Branch(ge);
        Register scratch = VirtualFrame::scratch0();
        __ mov(scratch, Operand(tos, ASR, kSmiTagSize));  // Untag.
        __ mov(tos, Operand(Smi::FromInt(int_value)));    // Load constant.
        __ mov(tos, Operand(tos, LSL, scratch));          // Shift constant.
        deferred->BindExit();
        TypeInfo result = TypeInfo::Integer32();
        frame_->EmitPush(tos, result);
        break;
      }
      // Fall through!
    case Token::SHR:
    case Token::SAR: {
      ASSERT(!reversed);
      int shift_value = int_value & 0x1f;
      TypeInfo result = TypeInfo::Number();

      if (op == Token::SHR) {
        if (shift_value > 1) {
          result = TypeInfo::Smi();
        } else if (shift_value > 0) {
          result = TypeInfo::Integer32();
        }
      } else if (op == Token::SAR) {
        if (shift_value > 0) {
          result = TypeInfo::Smi();
        } else {
          result = TypeInfo::Integer32();
        }
      } else {
        ASSERT(op == Token::SHL);
        result = TypeInfo::Integer32();
      }
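      // These static result types follow from the value ranges: a logical
      // right shift by two or more bits yields at most 2^30 - 1, which is
      // always a smi, while a shift by one can reach 2^31 - 1 and only fits
      // in an Integer32. An arithmetic right shift by at least one bit maps
      // int32 into [-2^30, 2^30 - 1], exactly the smi range. With a shift
      // count of zero SHR can produce any unsigned 32-bit value, hence the
      // Number default.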

      DeferredInlineSmiOperation* deferred =
          new DeferredInlineSmiOperation(op, shift_value, false, mode, tos);
      if (!both_sides_are_smi) {
        __ tst(tos, Operand(kSmiTagMask));
        deferred->JumpToNonSmiInput(ne);
      }
      switch (op) {
        case Token::SHL: {
          if (shift_value != 0) {
            Register scratch = VirtualFrame::scratch0();
            int adjusted_shift = shift_value - kSmiTagSize;
            ASSERT(adjusted_shift >= 0);

            if (adjusted_shift != 0) {
              __ mov(tos, Operand(tos, LSL, adjusted_shift));
            }
            // Check that the *signed* result fits in a smi.
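            // Adding 0x40000000 leaves the sign bit clear exactly when the
            // value lies in the smi range [-2^30, 2^30 - 1], so a negative
            // (mi) result means the shifted value cannot be tagged.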
            __ add(scratch, tos, Operand(0x40000000), SetCC);
            deferred->JumpToAnswerOutOfRange(mi);
            __ mov(tos, Operand(tos, LSL, kSmiTagSize));
          }
          break;
        }
        case Token::SHR: {
          if (shift_value != 0) {
            Register scratch = VirtualFrame::scratch0();
            __ mov(scratch, Operand(tos, ASR, kSmiTagSize));  // Remove tag.
            __ mov(tos, Operand(scratch, LSR, shift_value));
            if (shift_value == 1) {
              // Check that the *unsigned* result fits in a smi.
              // Neither of the two high-order bits can be set:
              // - 0x80000000: high bit would be lost when smi tagging.
              // - 0x40000000: this number would convert to negative when smi
              //   tagging.
              // These two cases can only happen with shifts by 0 or 1 when
              // handed a valid smi.
              __ tst(tos, Operand(0xc0000000));
              if (!CpuFeatures::IsSupported(VFP3)) {
                // If the unsigned result does not fit in a smi, we require an
                // unsigned-to-double conversion. Without VFP, V8 has to fall
                // back to the runtime. The deferred code will expect tos
                // to hold the original smi to be shifted.
                __ mov(tos, Operand(scratch, LSL, kSmiTagSize), LeaveCC, ne);
              }
              deferred->JumpToAnswerOutOfRange(ne);
            }
            __ mov(tos, Operand(tos, LSL, kSmiTagSize));
          } else {
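            // A shift by zero bits leaves the value unchanged, but SHR
            // reinterprets it as unsigned: a negative smi would produce a
            // result of at least 2^31, which cannot be represented as a smi.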
            __ cmp(tos, Operand(0, RelocInfo::NONE));
            deferred->JumpToAnswerOutOfRange(mi);
          }
          break;
        }
        case Token::SAR: {
          if (shift_value != 0) {
            // Do the shift and the tag removal in one operation. If the shift
            // is 31 bits (the highest possible value) then we emit the
            // instruction as a shift by 0 which in the ARM ISA means shift
            // arithmetically by 32.
            __ mov(tos, Operand(tos, ASR, (kSmiTagSize + shift_value) & 0x1f));
            __ mov(tos, Operand(tos, LSL, kSmiTagSize));
          }
          break;
        }
        default: UNREACHABLE();
      }
      deferred->BindExit();
      frame_->EmitPush(tos, result);
      break;
    }

    case Token::MOD: {
      ASSERT(!reversed);
      ASSERT(int_value >= 2);
      ASSERT(IsPowerOf2(int_value));
      DeferredCode* deferred =
          new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
      unsigned mask = (0x80000000u | kSmiTagMask);
      __ tst(tos, Operand(mask));
      deferred->Branch(ne);  // Go to deferred code on non-Smis and negative.
      mask = (int_value << kSmiTagSize) - 1;
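      // The tagged operand is 2 * lhs, so masking with
      // (int_value << kSmiTagSize) - 1 yields 2 * (lhs % int_value), the
      // already-tagged remainder. For example, with int_value == 8 the mask
      // is 0xF and (2 * lhs) & 0xF == 2 * (lhs & 7).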
      __ and_(tos, tos, Operand(mask));
      deferred->BindExit();
      // Mod of positive power of 2 Smi gives a Smi if the lhs is an integer.
      frame_->EmitPush(
          tos,
          both_sides_are_smi ? TypeInfo::Smi() : TypeInfo::Number());
      break;
    }

    case Token::MUL: {
      ASSERT(IsEasyToMultiplyBy(int_value));
      DeferredCode* deferred =
          new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
      unsigned max_smi_that_wont_overflow = Smi::kMaxValue / int_value;
      max_smi_that_wont_overflow <<= kSmiTagSize;
      unsigned mask = 0x80000000u;
      while ((mask & max_smi_that_wont_overflow) == 0) {
        mask |= mask >> 1;
      }
      mask |= kSmiTagMask;
      // This does a single mask that checks for a too high value in a
      // conservative way and for a non-Smi. It also filters out negative
      // numbers, unfortunately, but since this code is inline we prefer
      // brevity to comprehensiveness.
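      // For example, with int_value == 4 the mask works out to 0xF0000001:
      // any operand with one of the top four bits set (or a set tag bit)
      // takes the deferred path.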
      __ tst(tos, Operand(mask));
      deferred->Branch(ne);
      InlineMultiplyByKnownInt(masm_, tos, tos, int_value);
      deferred->BindExit();
      frame_->EmitPush(tos);
      break;
    }

    default:
      UNREACHABLE();
      break;
  }
}


void CodeGenerator::Comparison(Condition cc,
                               Expression* left,
                               Expression* right,
                               bool strict) {
  VirtualFrame::RegisterAllocationScope scope(this);

  if (left != NULL) Load(left);
  if (right != NULL) Load(right);

  // sp[0] : y
  // sp[1] : x
  // result : cc register

  // Strict only makes sense for equality comparisons.
  ASSERT(!strict || cc == eq);

  Register lhs;
  Register rhs;

  bool lhs_is_smi;
  bool rhs_is_smi;

  // We load the top two stack positions into registers chosen by the virtual
  // frame. This should keep the register shuffling to a minimum.
  // Implement '>' and '<=' by reversal to obtain ECMA-262 conversion order.
  if (cc == gt || cc == le) {
    cc = ReverseCondition(cc);
    lhs_is_smi = frame_->KnownSmiAt(0);
    rhs_is_smi = frame_->KnownSmiAt(1);
    lhs = frame_->PopToRegister();
    rhs = frame_->PopToRegister(lhs);  // Don't pop to the same register again!
  } else {
    rhs_is_smi = frame_->KnownSmiAt(0);
    lhs_is_smi = frame_->KnownSmiAt(1);
    rhs = frame_->PopToRegister();
    lhs = frame_->PopToRegister(rhs);  // Don't pop to the same register again!
  }

  bool both_sides_are_smi = (lhs_is_smi && rhs_is_smi);

  ASSERT(rhs.is(r0) || rhs.is(r1));
  ASSERT(lhs.is(r0) || lhs.is(r1));

  JumpTarget exit;

  if (!both_sides_are_smi) {
    // Now we have the two sides in r0 and r1. We flush any other registers
    // because the stub doesn't know about register allocation.
    frame_->SpillAll();
    Register scratch = VirtualFrame::scratch0();
    Register smi_test_reg;
    if (lhs_is_smi) {
      smi_test_reg = rhs;
    } else if (rhs_is_smi) {
      smi_test_reg = lhs;
    } else {
      __ orr(scratch, lhs, Operand(rhs));
      smi_test_reg = scratch;
    }
    __ tst(smi_test_reg, Operand(kSmiTagMask));
    JumpTarget smi;
    smi.Branch(eq);

    // Perform non-smi comparison by stub.
    // CompareStub takes arguments in r0 and r1, returns <0, >0 or 0 in r0.
    // We call with 0 args because there are 0 on the stack.
    CompareStub stub(cc, strict, kBothCouldBeNaN, true, lhs, rhs);
    frame_->CallStub(&stub, 0);
    __ cmp(r0, Operand(0, RelocInfo::NONE));
    exit.Jump();

    smi.Bind();
  }

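  // Two tagged smis compare exactly like the untagged values: tagging is a
  // left shift by one bit, which preserves both signed order and equality.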
  // Do smi comparisons by pointer comparison.
  __ cmp(lhs, Operand(rhs));

  exit.Bind();
  cc_reg_ = cc;
}


// Call the function on the stack with the given arguments.
void CodeGenerator::CallWithArguments(ZoneList<Expression*>* args,
                                      CallFunctionFlags flags,
                                      int position) {
  // Push the arguments ("left-to-right") on the stack.
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    Load(args->at(i));
  }

  // Record the position for debugging purposes.
  CodeForSourcePosition(position);

  // Use the shared code stub to call the function.
  InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
  CallFunctionStub call_function(arg_count, in_loop, flags);
  frame_->CallStub(&call_function, arg_count + 1);

  // Restore context and pop function from the stack.
  __ ldr(cp, frame_->Context());
  frame_->Drop();  // discard the TOS
}


void CodeGenerator::CallApplyLazy(Expression* applicand,
                                  Expression* receiver,
                                  VariableProxy* arguments,
                                  int position) {
  // An optimized implementation of expressions of the form
  // x.apply(y, arguments).
  // If the arguments object of the scope has not been allocated,
  // and x.apply is Function.prototype.apply, this optimization
  // just copies y and the arguments of the current function on the
  // stack, as receiver and arguments, and calls x.
  // In the implementation comments, we call x the applicand
  // and y the receiver.

  ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
  ASSERT(arguments->IsArguments());

  // Load applicand.apply onto the stack. This will usually
  // give us a megamorphic load site. Not super, but it works.
  Load(applicand);
  Handle<String> name = Factory::LookupAsciiSymbol("apply");
  frame_->Dup();
  frame_->CallLoadIC(name, RelocInfo::CODE_TARGET);
  frame_->EmitPush(r0);

  // Load the receiver and the existing arguments object onto the
  // expression stack. Avoid allocating the arguments object here.
  Load(receiver);
  LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF);

  // At this point the top two stack elements are probably in registers
  // since they were just loaded. Ensure they are in registers and get
  // hold of them.
  Register receiver_reg = frame_->Peek2();
  Register arguments_reg = frame_->Peek();

  // From now on the frame is spilled.
  frame_->SpillAll();

  // Emit the source position information after having loaded the
  // receiver and the arguments.
  CodeForSourcePosition(position);
  // Contents of the stack at this point:
  // sp[0]: arguments object of the current function or the hole.
  // sp[1]: receiver
  // sp[2]: applicand.apply
  // sp[3]: applicand.

  // Check if the arguments object has been lazily allocated
  // already. If so, just use that instead of copying the arguments
  // from the stack. This also deals with cases where a local variable
  // named 'arguments' has been introduced.
  JumpTarget slow;
  Label done;
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(ip, arguments_reg);
  slow.Branch(ne);

  Label build_args;
  // Get rid of the arguments object probe.
  frame_->Drop();
  // Stack now has 3 elements on it.
  // Contents of stack at this point:
  // sp[0]: receiver - in the receiver_reg register.
  // sp[1]: applicand.apply
  // sp[2]: applicand.

  // Check that the receiver really is a JavaScript object.
  __ BranchOnSmi(receiver_reg, &build_args);
  // We allow all JSObjects including JSFunctions. As long as
  // JS_FUNCTION_TYPE is the last instance type and it is right
  // after LAST_JS_OBJECT_TYPE, we do not have to check the upper
  // bound.
  STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
  STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
  __ CompareObjectType(receiver_reg, r2, r3, FIRST_JS_OBJECT_TYPE);
  __ b(lt, &build_args);

  // Check that applicand.apply is Function.prototype.apply.
  __ ldr(r0, MemOperand(sp, kPointerSize));
  __ BranchOnSmi(r0, &build_args);
  __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
  __ b(ne, &build_args);
  Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
  __ ldr(r1, FieldMemOperand(r0, JSFunction::kCodeEntryOffset));
  __ sub(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ cmp(r1, Operand(apply_code));
  __ b(ne, &build_args);

  // Check that applicand is a function.
  __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
  __ BranchOnSmi(r1, &build_args);
  __ CompareObjectType(r1, r2, r3, JS_FUNCTION_TYPE);
  __ b(ne, &build_args);

  // Copy the arguments to this function possibly from the
  // adaptor frame below it.
  Label invoke, adapted;
  __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
  __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ b(eq, &adapted);

  // No arguments adaptor frame. Copy fixed number of arguments.
  __ mov(r0, Operand(scope()->num_parameters()));
  for (int i = 0; i < scope()->num_parameters(); i++) {
    __ ldr(r2, frame_->ParameterAt(i));
    __ push(r2);
  }
  __ jmp(&invoke);

  // Arguments adaptor frame present. Copy arguments from there, but
  // avoid copying too many arguments to avoid stack overflows.
  __ bind(&adapted);
  static const uint32_t kArgumentsLimit = 1 * KB;
  __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ mov(r0, Operand(r0, LSR, kSmiTagSize));
  __ mov(r3, r0);
  __ cmp(r0, Operand(kArgumentsLimit));
  __ b(gt, &build_args);

  // Loop through the arguments pushing them onto the execution
  // stack. We don't inform the virtual frame of the push, so we don't
  // have to worry about getting rid of the elements from the virtual
  // frame.
  Label loop;
  // r3 is a small non-negative integer, due to the test above.
  __ cmp(r3, Operand(0, RelocInfo::NONE));
  __ b(eq, &invoke);
  // Compute the address of the first argument.
  __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2));
  __ add(r2, r2, Operand(kPointerSize));
  __ bind(&loop);
  // Post-decrement argument address by kPointerSize on each iteration.
  __ ldr(r4, MemOperand(r2, kPointerSize, NegPostIndex));
  __ push(r4);
  __ sub(r3, r3, Operand(1), SetCC);
  __ b(gt, &loop);

  // Invoke the function.
  __ bind(&invoke);
  ParameterCount actual(r0);
  __ InvokeFunction(r1, actual, CALL_FUNCTION);
  // Drop applicand.apply and applicand from the stack, and push
  // the result of the function call, but leave the spilled frame
  // unchanged, with 3 elements, so it is correct when we compile the
  // slow-case code.
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ push(r0);
  // Stack now has 1 element:
  // sp[0]: result
  __ jmp(&done);

  // Slow-case: Allocate the arguments object since we know it isn't
  // there, and fall-through to the slow-case where we call
  // applicand.apply.
  __ bind(&build_args);
  // Stack now has 3 elements, because we have jumped here from a point
  // where the stack looked like this:
  // sp[0]: receiver
  // sp[1]: applicand.apply
  // sp[2]: applicand.
  StoreArgumentsObject(false);

  // Stack and frame now have 4 elements.
  slow.Bind();

  // Generic computation of x.apply(y, args) with no special optimization.
  // Flip applicand.apply and applicand on the stack, so
  // applicand looks like the receiver of the applicand.apply call.
  // Then process it as a normal function call.
  __ ldr(r0, MemOperand(sp, 3 * kPointerSize));
  __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
  __ Strd(r0, r1, MemOperand(sp, 2 * kPointerSize));

  CallFunctionStub call_function(2, NOT_IN_LOOP, NO_CALL_FUNCTION_FLAGS);
  frame_->CallStub(&call_function, 3);
  // The function and its two arguments have been dropped.
  frame_->Drop();  // Drop the receiver as well.
  frame_->EmitPush(r0);
  frame_->SpillAll();  // Match the spilled frame state at the done label.
  // Stack now has 1 element:
  // sp[0]: result
  __ bind(&done);

  // Restore the context register after a call.
  __ ldr(cp, frame_->Context());
}


void CodeGenerator::Branch(bool if_true, JumpTarget* target) {
  ASSERT(has_cc());
  Condition cc = if_true ? cc_reg_ : NegateCondition(cc_reg_);
  target->Branch(cc);
  cc_reg_ = al;
}


void CodeGenerator::CheckStack() {
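  // The stack check stub does not know about our register allocation, so
  // flush any register-allocated frame elements to memory first.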
  frame_->SpillAll();
  Comment cmnt(masm_, "[ check stack");
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  // Put the lr setup instruction in the delay slot. kInstrSize is added to
  // the implicit 8 byte offset that always applies to operations with pc and
  // gives a return address 12 bytes down.
  masm_->add(lr, pc, Operand(Assembler::kInstrSize));
  masm_->cmp(sp, Operand(ip));
  StackCheckStub stub;
  // Call the stub if lower.
  masm_->mov(pc,
             Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
                     RelocInfo::CODE_TARGET),
             LeaveCC,
             lo);
}


void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  for (int i = 0; frame_ != NULL && i < statements->length(); i++) {
    Visit(statements->at(i));
  }
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}


void CodeGenerator::VisitBlock(Block* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Block");
  CodeForStatementPosition(node);
  node->break_target()->SetExpectedHeight();
  VisitStatements(node->statements());
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  node->break_target()->Unuse();
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}


void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  frame_->EmitPush(cp);
  frame_->EmitPush(Operand(pairs));
  frame_->EmitPush(Operand(Smi::FromInt(is_eval() ? 1 : 0)));

  frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
  // The result is discarded.
}


void CodeGenerator::VisitDeclaration(Declaration* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Declaration");
  Variable* var = node->proxy()->var();
  ASSERT(var != NULL);  // must have been resolved
  Slot* slot = var->slot();

  // If it was not possible to allocate the variable at compile time,
  // we need to "declare" it at runtime to make sure it actually
  // exists in the local context.
  if (slot != NULL && slot->type() == Slot::LOOKUP) {
    // Variables with a "LOOKUP" slot were introduced as non-locals
    // during variable resolution and must have mode DYNAMIC.
    ASSERT(var->is_dynamic());
    // For now, just do a runtime call.
    frame_->EmitPush(cp);
    frame_->EmitPush(Operand(var->name()));
    // Declaration nodes are always declared in only two modes.
    ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST);
    PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY;
    frame_->EmitPush(Operand(Smi::FromInt(attr)));
    // Push initial value, if any.
    // Note: For variables we must not push an initial value (such as
    // 'undefined') because we may have a (legal) redeclaration and we
    // must not destroy the current value.
    if (node->mode() == Variable::CONST) {
      frame_->EmitPushRoot(Heap::kTheHoleValueRootIndex);
    } else if (node->fun() != NULL) {
      Load(node->fun());
    } else {
      frame_->EmitPush(Operand(0, RelocInfo::NONE));
    }

    frame_->CallRuntime(Runtime::kDeclareContextSlot, 4);
    // Ignore the return value (declarations are statements).

    ASSERT(frame_->height() == original_height);
    return;
  }

  ASSERT(!var->is_global());

  // If we have a function or a constant, we need to initialize the variable.
  Expression* val = NULL;
  if (node->mode() == Variable::CONST) {
    val = new Literal(Factory::the_hole_value());
  } else {
    val = node->fun();  // NULL if we don't have a function
  }

  if (val != NULL) {
    WriteBarrierCharacter wb_info =
        val->type()->IsLikelySmi() ? LIKELY_SMI : UNLIKELY_SMI;
    if (val->AsLiteral() != NULL) wb_info = NEVER_NEWSPACE;
    // Set initial value.
    Reference target(this, node->proxy());
    Load(val);
    target.SetValue(NOT_CONST_INIT, wb_info);

    // Get rid of the assigned value (declarations are statements).
    frame_->Drop();
  }
  ASSERT(frame_->height() == original_height);
}


void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ ExpressionStatement");
  CodeForStatementPosition(node);
  Expression* expression = node->expression();
  expression->MarkAsStatement();
  Load(expression);
  frame_->Drop();
  ASSERT(frame_->height() == original_height);
}


void CodeGenerator::VisitEmptyStatement(EmptyStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "// EmptyStatement");
  CodeForStatementPosition(node);
  // nothing to do
  ASSERT(frame_->height() == original_height);
}


void CodeGenerator::VisitIfStatement(IfStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ IfStatement");
  // Generate different code depending on which parts of the if statement
  // are present or not.
  bool has_then_stm = node->HasThenStatement();
  bool has_else_stm = node->HasElseStatement();

  CodeForStatementPosition(node);

  JumpTarget exit;
  if (has_then_stm && has_else_stm) {
    Comment cmnt(masm_, "[ IfThenElse");
    JumpTarget then;
    JumpTarget else_;
    // if (cond)
    LoadCondition(node->condition(), &then, &else_, true);
    if (frame_ != NULL) {
      Branch(false, &else_);
    }
    // then
    if (frame_ != NULL || then.is_linked()) {
      then.Bind();
      Visit(node->then_statement());
    }
    if (frame_ != NULL) {
      exit.Jump();
    }
    // else
    if (else_.is_linked()) {
      else_.Bind();
      Visit(node->else_statement());
    }

  } else if (has_then_stm) {
    Comment cmnt(masm_, "[ IfThen");
    ASSERT(!has_else_stm);
    JumpTarget then;
    // if (cond)
    LoadCondition(node->condition(), &then, &exit, true);
    if (frame_ != NULL) {
      Branch(false, &exit);
    }
    // then
    if (frame_ != NULL || then.is_linked()) {
      then.Bind();
      Visit(node->then_statement());
    }

  } else if (has_else_stm) {
    Comment cmnt(masm_, "[ IfElse");
    ASSERT(!has_then_stm);
    JumpTarget else_;
    // if (!cond)
    LoadCondition(node->condition(), &exit, &else_, true);
    if (frame_ != NULL) {
      Branch(true, &exit);
    }
    // else
    if (frame_ != NULL || else_.is_linked()) {
      else_.Bind();
      Visit(node->else_statement());
    }

  } else {
    Comment cmnt(masm_, "[ If");
    ASSERT(!has_then_stm && !has_else_stm);
    // if (cond)
    LoadCondition(node->condition(), &exit, &exit, false);
    if (frame_ != NULL) {
      if (has_cc()) {
        cc_reg_ = al;
      } else {
        frame_->Drop();
      }
    }
  }

  // end
  if (exit.is_linked()) {
    exit.Bind();
  }
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}


void CodeGenerator::VisitContinueStatement(ContinueStatement* node) {
  Comment cmnt(masm_, "[ ContinueStatement");
  CodeForStatementPosition(node);
  node->target()->continue_target()->Jump();
}


void CodeGenerator::VisitBreakStatement(BreakStatement* node) {
  Comment cmnt(masm_, "[ BreakStatement");
  CodeForStatementPosition(node);
  node->target()->break_target()->Jump();
}


void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
  Comment cmnt(masm_, "[ ReturnStatement");

  CodeForStatementPosition(node);
  Load(node->expression());
  frame_->PopToR0();
  frame_->PrepareForReturn();
  if (function_return_is_shadowed_) {
    function_return_.Jump();
  } else {
    // The return value is in r0 and the frame is ready for returning.
    if (function_return_.is_bound()) {
      // If the function return label is already bound we reuse the
      // code by jumping to the return site.
      function_return_.Jump();
    } else {
      function_return_.Bind();
      GenerateReturnSequence();
    }
  }
}


void CodeGenerator::GenerateReturnSequence() {
  if (FLAG_trace) {
    // Push the return value on the stack as the parameter.
    // Runtime::TraceExit returns the parameter as it is.
    frame_->EmitPush(r0);
    frame_->CallRuntime(Runtime::kTraceExit, 1);
  }

#ifdef DEBUG
  // Add a label for checking the size of the code used for returning.
  Label check_exit_codesize;
  masm_->bind(&check_exit_codesize);
#endif
  // Make sure that the constant pool is not emitted inside of the return
  // sequence.
  { Assembler::BlockConstPoolScope block_const_pool(masm_);
    // Tear down the frame which will restore the caller's frame pointer and
    // the link register.
    frame_->Exit();

    // Here we use masm_-> instead of the __ macro to avoid the code coverage
    // tool from instrumenting as we rely on the code size here.
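    // sp_delta pops the parameters plus one extra slot for the receiver,
    // which the caller pushed below the parameters.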
    int32_t sp_delta = (scope()->num_parameters() + 1) * kPointerSize;
    masm_->add(sp, sp, Operand(sp_delta));
    masm_->Jump(lr);
    DeleteFrame();

#ifdef DEBUG
    // Check that the size of the code used for returning matches what is
    // expected by the debugger. If the sp_delta above cannot be encoded in
    // the add instruction the add will generate two instructions.
    int return_sequence_length =
        masm_->InstructionsGeneratedSince(&check_exit_codesize);
    CHECK(return_sequence_length ==
              Assembler::kJSReturnSequenceInstructions ||
          return_sequence_length ==
              Assembler::kJSReturnSequenceInstructions + 1);
#endif
  }
}


void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ WithEnterStatement");
  CodeForStatementPosition(node);
  Load(node->expression());
  if (node->is_catch_block()) {
    frame_->CallRuntime(Runtime::kPushCatchContext, 1);
  } else {
    frame_->CallRuntime(Runtime::kPushContext, 1);
  }
#ifdef DEBUG
  JumpTarget verified_true;
  __ cmp(r0, cp);
  verified_true.Branch(eq);
  __ stop("PushContext: r0 is expected to be the same as cp");
  verified_true.Bind();
#endif
  // Update context local.
  __ str(cp, frame_->Context());
  ASSERT(frame_->height() == original_height);
}


void CodeGenerator::VisitWithExitStatement(WithExitStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ WithExitStatement");
  CodeForStatementPosition(node);
  // Pop context.
  __ ldr(cp, ContextOperand(cp, Context::PREVIOUS_INDEX));
  // Update context local.
  __ str(cp, frame_->Context());
  ASSERT(frame_->height() == original_height);
}


void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ SwitchStatement");
  CodeForStatementPosition(node);
  node->break_target()->SetExpectedHeight();

  Load(node->tag());

  JumpTarget next_test;
  JumpTarget fall_through;
  JumpTarget default_entry;
  JumpTarget default_exit(JumpTarget::BIDIRECTIONAL);
  ZoneList<CaseClause*>* cases = node->cases();
  int length = cases->length();
  CaseClause* default_clause = NULL;
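  // The switch compiles to a linear sequence of tests: each non-default
  // clause duplicates the switch value, compares, and enters its body on a
  // match. The default clause, if present, is compiled after all the tests.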

  for (int i = 0; i < length; i++) {
    CaseClause* clause = cases->at(i);
    if (clause->is_default()) {
      // Remember the default clause and compile it at the end.
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case clause");
    // Compile the test.
    next_test.Bind();
    next_test.Unuse();
    // Duplicate TOS.
    frame_->Dup();
    Comparison(eq, NULL, clause->label(), true);
    Branch(false, &next_test);

    // Before entering the body from the test, remove the switch value from
    // the stack.
    frame_->Drop();

    // Label the body so that fall through is enabled.
    if (i > 0 && cases->at(i - 1)->is_default()) {
      default_exit.Bind();
    } else {
      fall_through.Bind();
      fall_through.Unuse();
    }
    VisitStatements(clause->statements());

    // If control flow can fall through from the body, jump to the next body
    // or the end of the statement.
    if (frame_ != NULL) {
      if (i < length - 1 && cases->at(i + 1)->is_default()) {
        default_entry.Jump();
      } else {
        fall_through.Jump();
      }
    }
  }

  // The final "test" removes the switch value.
  next_test.Bind();
  frame_->Drop();

  // If there is a default clause, compile it.
  if (default_clause != NULL) {
    Comment cmnt(masm_, "[ Default clause");
    default_entry.Bind();
    VisitStatements(default_clause->statements());
    // If control flow can fall out of the default and there is a case after
    // it, jump to that case's body.
    if (frame_ != NULL && default_exit.is_bound()) {
      default_exit.Jump();
    }
  }

  if (fall_through.is_linked()) {
    fall_through.Bind();
  }

  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  node->break_target()->Unuse();
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}


void CodeGenerator::VisitDoWhileStatement(DoWhileStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ DoWhileStatement");
  CodeForStatementPosition(node);
  node->break_target()->SetExpectedHeight();
  JumpTarget body(JumpTarget::BIDIRECTIONAL);
  IncrementLoopNesting();

  // Label the top of the loop for the backward CFG edge. If the test
  // is always true we can use the continue target, and if the test is
  // always false there is no need.
  ConditionAnalysis info = AnalyzeCondition(node->cond());
2348 switch (info) {
2349 case ALWAYS_TRUE:
Kristian Monsen25f61362010-05-21 11:50:48 +01002350 node->continue_target()->SetExpectedHeight();
Steve Blocka7e24c12009-10-30 11:49:00 +00002351 node->continue_target()->Bind();
Steve Block3ce2e202009-11-05 08:53:23 +00002352 break;
2353 case ALWAYS_FALSE:
Kristian Monsen25f61362010-05-21 11:50:48 +01002354 node->continue_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002355 break;
2356 case DONT_KNOW:
Kristian Monsen25f61362010-05-21 11:50:48 +01002357 node->continue_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002358 body.Bind();
2359 break;
2360 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002361
Steve Block3ce2e202009-11-05 08:53:23 +00002362 CheckStack(); // TODO(1222600): ignore if body contains calls.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002363 Visit(node->body());
Steve Blocka7e24c12009-10-30 11:49:00 +00002364
Steve Blockd0582a62009-12-15 09:54:21 +00002365 // Compile the test.
Steve Block3ce2e202009-11-05 08:53:23 +00002366 switch (info) {
2367 case ALWAYS_TRUE:
2368 // If control can fall off the end of the body, jump back to the
2369 // top.
Steve Blocka7e24c12009-10-30 11:49:00 +00002370 if (has_valid_frame()) {
Steve Block3ce2e202009-11-05 08:53:23 +00002371 node->continue_target()->Jump();
Steve Blocka7e24c12009-10-30 11:49:00 +00002372 }
2373 break;
Steve Block3ce2e202009-11-05 08:53:23 +00002374 case ALWAYS_FALSE:
2375 // If we have a continue in the body, we only have to bind its
2376 // jump target.
2377 if (node->continue_target()->is_linked()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002378 node->continue_target()->Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00002379 }
Steve Block3ce2e202009-11-05 08:53:23 +00002380 break;
2381 case DONT_KNOW:
2382 // We have to compile the test expression if it can be reached by
2383 // control flow falling out of the body or via continue.
2384 if (node->continue_target()->is_linked()) {
2385 node->continue_target()->Bind();
2386 }
2387 if (has_valid_frame()) {
Steve Blockd0582a62009-12-15 09:54:21 +00002388 Comment cmnt(masm_, "[ DoWhileCondition");
2389 CodeForDoWhileConditionPosition(node);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002390 LoadCondition(node->cond(), &body, node->break_target(), true);
Steve Blocka7e24c12009-10-30 11:49:00 +00002391 if (has_valid_frame()) {
Steve Block3ce2e202009-11-05 08:53:23 +00002392 // A invalid frame here indicates that control did not
2393 // fall out of the test expression.
2394 Branch(true, &body);
Steve Blocka7e24c12009-10-30 11:49:00 +00002395 }
2396 }
2397 break;
Steve Blocka7e24c12009-10-30 11:49:00 +00002398 }
2399
2400 if (node->break_target()->is_linked()) {
2401 node->break_target()->Bind();
2402 }
Steve Block6ded16b2010-05-10 14:33:55 +01002403 DecrementLoopNesting();
Steve Block3ce2e202009-11-05 08:53:23 +00002404 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2405}
2406
2407
2408void CodeGenerator::VisitWhileStatement(WhileStatement* node) {
2409#ifdef DEBUG
2410 int original_height = frame_->height();
2411#endif
Steve Block3ce2e202009-11-05 08:53:23 +00002412 Comment cmnt(masm_, "[ WhileStatement");
2413 CodeForStatementPosition(node);
2414
2415 // If the test is never true and has no side effects there is no need
2416 // to compile the test or body.
2417 ConditionAnalysis info = AnalyzeCondition(node->cond());
2418 if (info == ALWAYS_FALSE) return;
2419
Kristian Monsen25f61362010-05-21 11:50:48 +01002420 node->break_target()->SetExpectedHeight();
Steve Block6ded16b2010-05-10 14:33:55 +01002421 IncrementLoopNesting();
Steve Block3ce2e202009-11-05 08:53:23 +00002422
2423 // Label the top of the loop with the continue target for the backward
2424 // CFG edge.
Kristian Monsen25f61362010-05-21 11:50:48 +01002425 node->continue_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002426 node->continue_target()->Bind();
2427
2428 if (info == DONT_KNOW) {
Steve Block8defd9f2010-07-08 12:39:36 +01002429 JumpTarget body(JumpTarget::BIDIRECTIONAL);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002430 LoadCondition(node->cond(), &body, node->break_target(), true);
Steve Block3ce2e202009-11-05 08:53:23 +00002431 if (has_valid_frame()) {
2432 // A NULL frame indicates that control did not fall out of the
2433 // test expression.
2434 Branch(false, node->break_target());
2435 }
2436 if (has_valid_frame() || body.is_linked()) {
2437 body.Bind();
2438 }
2439 }
2440
2441 if (has_valid_frame()) {
2442 CheckStack(); // TODO(1222600): ignore if body contains calls.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002443 Visit(node->body());
Steve Block3ce2e202009-11-05 08:53:23 +00002444
2445 // If control flow can fall out of the body, jump back to the top.
2446 if (has_valid_frame()) {
2447 node->continue_target()->Jump();
2448 }
2449 }
2450 if (node->break_target()->is_linked()) {
2451 node->break_target()->Bind();
2452 }
Steve Block6ded16b2010-05-10 14:33:55 +01002453 DecrementLoopNesting();
Steve Block3ce2e202009-11-05 08:53:23 +00002454 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2455}
2456
2457
2458void CodeGenerator::VisitForStatement(ForStatement* node) {
2459#ifdef DEBUG
2460 int original_height = frame_->height();
2461#endif
Steve Block3ce2e202009-11-05 08:53:23 +00002462 Comment cmnt(masm_, "[ ForStatement");
2463 CodeForStatementPosition(node);
2464 if (node->init() != NULL) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002465 Visit(node->init());
Steve Block3ce2e202009-11-05 08:53:23 +00002466 }
2467
2468 // If the test is never true there is no need to compile the test or
2469 // body.
2470 ConditionAnalysis info = AnalyzeCondition(node->cond());
2471 if (info == ALWAYS_FALSE) return;
2472
Kristian Monsen25f61362010-05-21 11:50:48 +01002473 node->break_target()->SetExpectedHeight();
Steve Block6ded16b2010-05-10 14:33:55 +01002474 IncrementLoopNesting();
Steve Block3ce2e202009-11-05 08:53:23 +00002475
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002476 // We know that the loop index is a smi if it is not modified in the
2477 // loop body and it is checked against a constant limit in the loop
2478 // condition. In this case, we reset the static type information of the
2479 // loop index to smi before compiling the body, the update expression, and
2480 // the bottom check of the loop condition.
2481 TypeInfoCodeGenState type_info_scope(this,
2482 node->is_fast_smi_loop() ?
2483 node->loop_variable()->slot() :
2484 NULL,
2485 TypeInfo::Smi());
2486
Steve Block3ce2e202009-11-05 08:53:23 +00002487 // If there is no update statement, label the top of the loop with the
2488 // continue target, otherwise with the loop target.
2489 JumpTarget loop(JumpTarget::BIDIRECTIONAL);
2490 if (node->next() == NULL) {
Kristian Monsen25f61362010-05-21 11:50:48 +01002491 node->continue_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002492 node->continue_target()->Bind();
2493 } else {
Kristian Monsen25f61362010-05-21 11:50:48 +01002494 node->continue_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002495 loop.Bind();
2496 }
2497
2498 // If the test is always true, there is no need to compile it.
2499 if (info == DONT_KNOW) {
2500 JumpTarget body;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002501 LoadCondition(node->cond(), &body, node->break_target(), true);
Steve Block3ce2e202009-11-05 08:53:23 +00002502 if (has_valid_frame()) {
2503 Branch(false, node->break_target());
2504 }
2505 if (has_valid_frame() || body.is_linked()) {
2506 body.Bind();
2507 }
2508 }
2509
2510 if (has_valid_frame()) {
2511 CheckStack(); // TODO(1222600): ignore if body contains calls.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002512 Visit(node->body());
Steve Block3ce2e202009-11-05 08:53:23 +00002513
2514 if (node->next() == NULL) {
2515 // If there is no update statement and control flow can fall out
2516 // of the loop, jump directly to the continue label.
2517 if (has_valid_frame()) {
2518 node->continue_target()->Jump();
2519 }
2520 } else {
2521 // If there is an update statement and control flow can reach it
2522 // via falling out of the body of the loop or continuing, we
2523 // compile the update statement.
2524 if (node->continue_target()->is_linked()) {
2525 node->continue_target()->Bind();
2526 }
2527 if (has_valid_frame()) {
2528 // Record source position of the statement as this code which is
2529 // after the code for the body actually belongs to the loop
2530 // statement and not the body.
2531 CodeForStatementPosition(node);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002532 Visit(node->next());
Steve Block3ce2e202009-11-05 08:53:23 +00002533 loop.Jump();
2534 }
2535 }
2536 }
2537 if (node->break_target()->is_linked()) {
2538 node->break_target()->Bind();
2539 }
Steve Block6ded16b2010-05-10 14:33:55 +01002540 DecrementLoopNesting();
Steve Blocka7e24c12009-10-30 11:49:00 +00002541 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2542}
2543
2544
2545void CodeGenerator::VisitForInStatement(ForInStatement* node) {
2546#ifdef DEBUG
2547 int original_height = frame_->height();
2548#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00002549 Comment cmnt(masm_, "[ ForInStatement");
2550 CodeForStatementPosition(node);
2551
2552 JumpTarget primitive;
2553 JumpTarget jsobject;
2554 JumpTarget fixed_array;
2555 JumpTarget entry(JumpTarget::BIDIRECTIONAL);
2556 JumpTarget end_del_check;
2557 JumpTarget exit;
2558
2559 // Get the object to enumerate over (converted to JSObject).
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002560 Load(node->enumerable());
Steve Blocka7e24c12009-10-30 11:49:00 +00002561
Iain Merrick75681382010-08-19 15:07:18 +01002562 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00002563 // Both SpiderMonkey and kjs ignore null and undefined in contrast
2564 // to the specification. 12.6.4 mandates a call to ToObject.
2565 frame_->EmitPop(r0);
2566 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
2567 __ cmp(r0, ip);
2568 exit.Branch(eq);
2569 __ LoadRoot(ip, Heap::kNullValueRootIndex);
2570 __ cmp(r0, ip);
2571 exit.Branch(eq);
2572
2573 // Stack layout in body:
2574 // [iteration counter (Smi)]
2575 // [length of array]
2576 // [FixedArray]
2577 // [Map or 0]
2578 // [Object]

  // Check if the enumerable is already a JSObject.
  __ tst(r0, Operand(kSmiTagMask));
  primitive.Branch(eq);
  __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
  jsobject.Branch(hs);

  primitive.Bind();
  frame_->EmitPush(r0);
  frame_->InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS, 1);

  jsobject.Bind();
  // Get the set of properties (as a FixedArray or Map).
  // r0: value to be iterated over
  frame_->EmitPush(r0);  // Push the object being iterated over.

  // Check cache validity in generated code.  This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks.  If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  JumpTarget call_runtime;
  JumpTarget loop(JumpTarget::BIDIRECTIONAL);
  JumpTarget check_prototype;
  JumpTarget use_cache;
  __ mov(r1, Operand(r0));
  loop.Bind();
  // Check that there are no elements.
  __ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
  __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex);
  __ cmp(r2, r4);
  call_runtime.Branch(ne);
  // Check that instance descriptors are not empty so that we can
  // check for an enum cache.  Leave the map in r3 for the subsequent
  // prototype load.
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldr(r2, FieldMemOperand(r3, Map::kInstanceDescriptorsOffset));
  __ LoadRoot(ip, Heap::kEmptyDescriptorArrayRootIndex);
  __ cmp(r2, ip);
  call_runtime.Branch(eq);
  // Check that there is an enum cache in the non-empty instance
  // descriptors.  This is the case if the next enumeration index
  // field does not contain a smi.
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumerationIndexOffset));
  __ tst(r2, Operand(kSmiTagMask));
  call_runtime.Branch(eq);
  // For all objects but the receiver, check that the cache is empty.
  // r4: empty fixed array root.
  __ cmp(r1, r0);
  check_prototype.Branch(eq);
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));
  __ cmp(r2, r4);
  call_runtime.Branch(ne);
  check_prototype.Bind();
  // Load the prototype from the map and loop if non-null.
  __ ldr(r1, FieldMemOperand(r3, Map::kPrototypeOffset));
  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(r1, ip);
  loop.Branch(ne);
  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
  use_cache.Jump();

  call_runtime.Bind();
  // Call the runtime to get the property names for the object.
  frame_->EmitPush(r0);  // Push the object (slot 4) for the runtime call.
  frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check.  Otherwise, we got a fixed array, and we have
  // to do a slow check.
  // r0: map or fixed array (result from call to
  // Runtime::kGetPropertyNamesFast)
  __ mov(r2, Operand(r0));
  __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r1, ip);
  fixed_array.Branch(ne);

  use_cache.Bind();
  // Get the enum cache.
  // r0: map (either the result from a call to
  // Runtime::kGetPropertyNamesFast or has been fetched directly from
  // the object)
  __ mov(r1, Operand(r0));
  __ ldr(r1, FieldMemOperand(r1, Map::kInstanceDescriptorsOffset));
  __ ldr(r1, FieldMemOperand(r1, DescriptorArray::kEnumerationIndexOffset));
  __ ldr(r2,
         FieldMemOperand(r1, DescriptorArray::kEnumCacheBridgeCacheOffset));

  frame_->EmitPush(r0);  // map
  frame_->EmitPush(r2);  // enum cache bridge cache
  __ ldr(r0, FieldMemOperand(r2, FixedArray::kLengthOffset));
  frame_->EmitPush(r0);
  __ mov(r0, Operand(Smi::FromInt(0)));
  frame_->EmitPush(r0);
  entry.Jump();

  fixed_array.Bind();
  __ mov(r1, Operand(Smi::FromInt(0)));
  frame_->EmitPush(r1);  // Insert 0 in place of the map.
  frame_->EmitPush(r0);

  // Push the length of the array and the initial index onto the stack.
  __ ldr(r0, FieldMemOperand(r0, FixedArray::kLengthOffset));
  frame_->EmitPush(r0);
  __ mov(r0, Operand(Smi::FromInt(0)));  // Initial index.
  frame_->EmitPush(r0);

  // Condition.
  entry.Bind();
  // sp[0] : index
  // sp[1] : array/enum cache length
  // sp[2] : array or enum cache
  // sp[3] : 0 or map
  // sp[4] : enumerable
  // Grab the current frame's height for the break and continue
  // targets only after all the state is pushed on the frame.
  node->break_target()->SetExpectedHeight();
  node->continue_target()->SetExpectedHeight();

  // Load the current count to r0, load the length to r1.
  __ Ldrd(r0, r1, frame_->ElementAt(0));
  __ cmp(r0, r1);  // Compare to the array length.
  node->break_target()->Branch(hs);

  // Get the i'th entry of the array.
  __ ldr(r2, frame_->ElementAt(2));
  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r3, MemOperand(r2, r0, LSL, kPointerSizeLog2 - kSmiTagSize));

  // Get the map or 0.
  __ ldr(r2, frame_->ElementAt(3));
  // Check if this (still) matches the map of the enumerable.
  // If not, we have to filter the key.
  __ ldr(r1, frame_->ElementAt(4));
  __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r1, Operand(r2));
  end_del_check.Branch(eq);

  // Convert the entry to a string (or null if it isn't a property anymore).
  __ ldr(r0, frame_->ElementAt(4));  // Load the enumerable.
  frame_->EmitPush(r0);
  frame_->EmitPush(r3);  // Push the entry.
  frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_JS, 2);
  __ mov(r3, Operand(r0), SetCC);
  // If the property has been removed while iterating, we just skip it.
  node->continue_target()->Branch(eq);
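
  // Illustrative note, not generated code: the FILTER_KEY step is what keeps
  // a loop such as
  //
  //   for (var key in o) { delete o[key]; }
  //
  // well behaved.  Once 'o' no longer matches the map recorded at sp[3],
  // every cached key is re-validated, and keys that are no longer properties
  // are skipped via the continue target.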

  end_del_check.Bind();
  // Store the entry in the 'each' expression and take another spin in the
  // loop.  r3: i'th entry of the enum cache (or string thereof).
  frame_->EmitPush(r3);  // Push the entry.
  { VirtualFrame::RegisterAllocationScope scope(this);
    Reference each(this, node->each());
    if (!each.is_illegal()) {
      if (each.size() > 0) {
        // Loading a reference may leave the frame in an unspilled state.
        frame_->SpillAll();  // Sync the stack to memory.
        // Get the value (under the reference on the stack) from memory.
        __ ldr(r0, frame_->ElementAt(each.size()));
        frame_->EmitPush(r0);
        each.SetValue(NOT_CONST_INIT, UNLIKELY_SMI);
        frame_->Drop(2);  // The result of the set and the extra pushed value.
      } else {
        // If the reference was to a slot we rely on the convenient property
        // that it doesn't matter whether a value (e.g., the entry pushed
        // above) is right on top of or right underneath a zero-sized
        // reference.
        each.SetValue(NOT_CONST_INIT, UNLIKELY_SMI);
        frame_->Drop(1);  // Drop the result of the set operation.
      }
    }
  }
  // Body.
  CheckStack();  // TODO(1222600): ignore if body contains calls.
  { VirtualFrame::RegisterAllocationScope scope(this);
    Visit(node->body());
  }

  // Next.  Reestablish a spilled frame in case we are coming here via
  // a continue in the body.
  node->continue_target()->Bind();
  frame_->SpillAll();
  frame_->EmitPop(r0);
  __ add(r0, r0, Operand(Smi::FromInt(1)));
  frame_->EmitPush(r0);
  entry.Jump();

  // Cleanup.  No need to spill because VirtualFrame::Drop is safe for
  // any frame.
  node->break_target()->Bind();
  frame_->Drop(5);

  // Exit.
  exit.Bind();
  node->continue_target()->Unuse();
  node->break_target()->Unuse();
  ASSERT(frame_->height() == original_height);
}


void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(frame_);
  Comment cmnt(masm_, "[ TryCatchStatement");
  CodeForStatementPosition(node);

  JumpTarget try_block;
  JumpTarget exit;

  try_block.Call();
  // --- Catch block ---
  frame_->EmitPush(r0);

  // Store the caught exception in the catch variable.
  Variable* catch_var = node->catch_var()->var();
  ASSERT(catch_var != NULL && catch_var->slot() != NULL);
  StoreToSlot(catch_var->slot(), NOT_CONST_INIT);

  // Remove the exception from the stack.
  frame_->Drop();

  { VirtualFrame::RegisterAllocationScope scope(this);
    VisitStatements(node->catch_block()->statements());
  }
  if (frame_ != NULL) {
    exit.Jump();
  }

  // --- Try block ---
  try_block.Bind();

  frame_->PushTryHandler(TRY_CATCH_HANDLER);
  int handler_height = frame_->height();

  // Shadow the labels for all escapes from the try block, including
  // returns.  During shadowing, the original label is hidden as the
  // LabelShadow, and operations on the original actually affect the
  // shadowing label.
  //
  // We should probably try to unify the escaping labels and the return
  // label.
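  //
  // Illustrative note, not generated code: a shadowed escape occurs in
  // JavaScript such as
  //
  //   try { return f(); } catch (e) { ... }
  //
  // where the return must not leave the try block directly.  It first jumps
  // to its shadow target so the try handler can be unlinked from the handler
  // chain (see the unlink blocks below) before the actual return happens.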
  int nof_escapes = node->escaping_targets()->length();
  List<ShadowTarget*> shadows(1 + nof_escapes);

  // Add the shadow target for the function return.
  static const int kReturnShadowIndex = 0;
  shadows.Add(new ShadowTarget(&function_return_));
  bool function_return_was_shadowed = function_return_is_shadowed_;
  function_return_is_shadowed_ = true;
  ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);

  // Add the remaining shadow targets.
  for (int i = 0; i < nof_escapes; i++) {
    shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
  }

  // Generate code for the statements in the try block.
  { VirtualFrame::RegisterAllocationScope scope(this);
    VisitStatements(node->try_block()->statements());
  }

  // Stop the introduced shadowing and count the number of required unlinks.
  // After shadowing stops, the original labels are unshadowed and the
  // LabelShadows represent the formerly shadowing labels.
  bool has_unlinks = false;
  for (int i = 0; i < shadows.length(); i++) {
    shadows[i]->StopShadowing();
    has_unlinks = has_unlinks || shadows[i]->is_linked();
  }
  function_return_is_shadowed_ = function_return_was_shadowed;

  // Get an external reference to the handler address.
  ExternalReference handler_address(Top::k_handler_address);

  // If we can fall off the end of the try block, unlink from the try chain.
  if (has_valid_frame()) {
    // The next handler address is on top of the frame.  Unlink from
    // the handler list and drop the rest of this handler from the
    // frame.
    STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
    frame_->EmitPop(r1);  // r0 can contain the return value.
    __ mov(r3, Operand(handler_address));
    __ str(r1, MemOperand(r3));
    frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
    if (has_unlinks) {
      exit.Jump();
    }
  }

  // Generate unlink code for the (formerly) shadowing labels that have been
  // jumped to.  Deallocate each shadow target.
  for (int i = 0; i < shadows.length(); i++) {
    if (shadows[i]->is_linked()) {
      // Unlink from the try chain.
      shadows[i]->Bind();
      // Because we can be jumping here (to spilled code) from unspilled
      // code, we need to reestablish a spilled frame at this block.
      frame_->SpillAll();

      // Reload sp from the top handler, because some statements that we
      // break from (e.g., for...in) may have left stuff on the stack.
      __ mov(r3, Operand(handler_address));
      __ ldr(sp, MemOperand(r3));
      frame_->Forget(frame_->height() - handler_height);

      STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
      frame_->EmitPop(r1);  // r0 can contain the return value.
      __ str(r1, MemOperand(r3));
      frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

      if (!function_return_is_shadowed_ && i == kReturnShadowIndex) {
        frame_->PrepareForReturn();
      }
      shadows[i]->other_target()->Jump();
    }
  }

  exit.Bind();
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}


void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(frame_);
  Comment cmnt(masm_, "[ TryFinallyStatement");
  CodeForStatementPosition(node);

  // State: Used to keep track of the reason for entering the finally
  // block.  Should probably be extended to hold information for
  // break/continue from within the try block.
  enum { FALLING, THROWING, JUMPING };
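
  // Illustrative note, not generated code: the finally block is entered with
  // a state value in r2 recording why.  For JavaScript such as
  //
  //   while (c) { try { if (p) break; } finally { cleanup(); } }
  //
  // the finally code runs with state FALLING when the try block completes
  // normally, THROWING when an exception was raised in it, and JUMPING + i
  // when escape target i (here the break) was taken; the state is dispatched
  // on below to resume the right control flow after the finally code.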

  JumpTarget try_block;
  JumpTarget finally_block;

  try_block.Call();

  frame_->EmitPush(r0);  // Save the exception object on the stack.
  // In case of thrown exceptions, this is where we continue.
  __ mov(r2, Operand(Smi::FromInt(THROWING)));
  finally_block.Jump();

  // --- Try block ---
  try_block.Bind();

  frame_->PushTryHandler(TRY_FINALLY_HANDLER);
  int handler_height = frame_->height();

  // Shadow the labels for all escapes from the try block, including
  // returns.  Shadowing hides the original label as the LabelShadow, and
  // operations on the original actually affect the shadowing label.
  //
  // We should probably try to unify the escaping labels and the return
  // label.
  int nof_escapes = node->escaping_targets()->length();
  List<ShadowTarget*> shadows(1 + nof_escapes);

  // Add the shadow target for the function return.
  static const int kReturnShadowIndex = 0;
  shadows.Add(new ShadowTarget(&function_return_));
  bool function_return_was_shadowed = function_return_is_shadowed_;
  function_return_is_shadowed_ = true;
  ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);

  // Add the remaining shadow targets.
  for (int i = 0; i < nof_escapes; i++) {
    shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
  }

  // Generate code for the statements in the try block.
  { VirtualFrame::RegisterAllocationScope scope(this);
    VisitStatements(node->try_block()->statements());
  }

  // Stop the introduced shadowing and count the number of required unlinks.
  // After shadowing stops, the original labels are unshadowed and the
  // LabelShadows represent the formerly shadowing labels.
  int nof_unlinks = 0;
  for (int i = 0; i < shadows.length(); i++) {
    shadows[i]->StopShadowing();
    if (shadows[i]->is_linked()) nof_unlinks++;
  }
  function_return_is_shadowed_ = function_return_was_shadowed;

  // Get an external reference to the handler address.
  ExternalReference handler_address(Top::k_handler_address);

  // If we can fall off the end of the try block, unlink from the try
  // chain and set the state on the frame to FALLING.
  if (has_valid_frame()) {
    // The next handler address is on top of the frame.
    STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
    frame_->EmitPop(r1);
    __ mov(r3, Operand(handler_address));
    __ str(r1, MemOperand(r3));
    frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

    // Fake a top of stack value (unneeded when FALLING) and set the
    // state in r2, then jump around the unlink blocks if any.
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    frame_->EmitPush(r0);
    __ mov(r2, Operand(Smi::FromInt(FALLING)));
    if (nof_unlinks > 0) {
      finally_block.Jump();
    }
  }

  // Generate code to unlink and set the state for the (formerly)
  // shadowing targets that have been jumped to.
  for (int i = 0; i < shadows.length(); i++) {
    if (shadows[i]->is_linked()) {
      // If we have come from the shadowed return, the return value is
      // in (a non-refcounted reference to) r0.  We must preserve it
      // until it is pushed.
      //
      // Because we can be jumping here (to spilled code) from
      // unspilled code, we need to reestablish a spilled frame at
      // this block.
      shadows[i]->Bind();
      frame_->SpillAll();

      // Reload sp from the top handler, because some statements that
      // we break from (e.g., for...in) may have left stuff on the
      // stack.
      __ mov(r3, Operand(handler_address));
      __ ldr(sp, MemOperand(r3));
      frame_->Forget(frame_->height() - handler_height);

      // Unlink this handler and drop it from the frame.  The next
      // handler address is currently on top of the frame.
      STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
      frame_->EmitPop(r1);
      __ str(r1, MemOperand(r3));
      frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

      if (i == kReturnShadowIndex) {
        // If this label shadowed the function return, materialize the
        // return value on the stack.
        frame_->EmitPush(r0);
      } else {
        // Fake TOS for targets that shadowed breaks and continues.
        __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
        frame_->EmitPush(r0);
      }
      __ mov(r2, Operand(Smi::FromInt(JUMPING + i)));
      if (--nof_unlinks > 0) {
        // If this is not the last unlink block, jump around the next.
        finally_block.Jump();
      }
    }
  }

  // --- Finally block ---
  finally_block.Bind();

  // Push the state on the stack.
  frame_->EmitPush(r2);

  // We keep two elements on the stack - the (possibly faked) result
  // and the state - while evaluating the finally block.
  //
  // Generate code for the statements in the finally block.
  { VirtualFrame::RegisterAllocationScope scope(this);
    VisitStatements(node->finally_block()->statements());
  }

  if (has_valid_frame()) {
    // Restore the state and the return value or faked TOS.
    frame_->EmitPop(r2);
    frame_->EmitPop(r0);
  }

  // Generate code to jump to the right destination for all used
  // formerly shadowing targets.  Deallocate each shadow target.
  for (int i = 0; i < shadows.length(); i++) {
    if (has_valid_frame() && shadows[i]->is_bound()) {
      JumpTarget* original = shadows[i]->other_target();
      __ cmp(r2, Operand(Smi::FromInt(JUMPING + i)));
      if (!function_return_is_shadowed_ && i == kReturnShadowIndex) {
        JumpTarget skip;
        skip.Branch(ne);
        frame_->PrepareForReturn();
        original->Jump();
        skip.Bind();
      } else {
        original->Branch(eq);
      }
    }
  }

  if (has_valid_frame()) {
    // Check if we need to rethrow the exception.
    JumpTarget exit;
    __ cmp(r2, Operand(Smi::FromInt(THROWING)));
    exit.Branch(ne);

    // Rethrow the exception.
    frame_->EmitPush(r0);
    frame_->CallRuntime(Runtime::kReThrow, 1);

    // Done.
    exit.Bind();
  }
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}


void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ DebuggerStatement");
  CodeForStatementPosition(node);
#ifdef ENABLE_DEBUGGER_SUPPORT
  frame_->DebugBreak();
#endif
  // Ignore the return value.
  ASSERT(frame_->height() == original_height);
}


void CodeGenerator::InstantiateFunction(
    Handle<SharedFunctionInfo> function_info) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
  if (scope()->is_function_scope() && function_info->num_literals() == 0) {
    FastNewClosureStub stub;
    frame_->EmitPush(Operand(function_info));
    frame_->SpillAll();
    frame_->CallStub(&stub, 1);
    frame_->EmitPush(r0);
  } else {
    // Create a new closure.
    frame_->EmitPush(cp);
    frame_->EmitPush(Operand(function_info));
    frame_->CallRuntime(Runtime::kNewClosure, 2);
    frame_->EmitPush(r0);
  }
}
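

// Illustrative note, not generated code: as a rough sketch, the fast-case
// stub handles a nested function that carries no literals of its own, e.g.
//
//   function outer() { return function inner(x) { return x + 1; }; }
//
// where 'inner' can be allocated directly in new space, while a function
// that needs its literals array set up goes through Runtime::kNewClosure.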


void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ FunctionLiteral");

  // Build the function info and instantiate it.
  Handle<SharedFunctionInfo> function_info =
      Compiler::BuildFunctionInfo(node, script(), this);
  // Check for a stack-overflow exception.
  if (HasStackOverflow()) {
    ASSERT(frame_->height() == original_height);
    return;
  }
  InstantiateFunction(function_info);
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitSharedFunctionInfoLiteral(
    SharedFunctionInfoLiteral* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ SharedFunctionInfoLiteral");
  InstantiateFunction(node->shared_function_info());
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitConditional(Conditional* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Conditional");
  JumpTarget then;
  JumpTarget else_;
  LoadCondition(node->condition(), &then, &else_, true);
  if (has_valid_frame()) {
    Branch(false, &else_);
  }
  if (has_valid_frame() || then.is_linked()) {
    then.Bind();
    Load(node->then_expression());
  }
  if (else_.is_linked()) {
    JumpTarget exit;
    if (has_valid_frame()) exit.Jump();
    else_.Bind();
    Load(node->else_expression());
    if (exit.is_linked()) exit.Bind();
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
  if (slot->type() == Slot::LOOKUP) {
    ASSERT(slot->var()->is_dynamic());

    // JumpTargets do not yet support merging frames, so the frame must be
    // spilled when jumping to these targets.
    JumpTarget slow;
    JumpTarget done;

    // Generate a fast case for loading from slots that correspond to
    // local/global variables or arguments, unless they are shadowed by
    // eval-introduced bindings.
    EmitDynamicLoadFromSlotFastCase(slot,
                                    typeof_state,
                                    &slow,
                                    &done);

    slow.Bind();
    frame_->EmitPush(cp);
    frame_->EmitPush(Operand(slot->var()->name()));

    if (typeof_state == INSIDE_TYPEOF) {
      frame_->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    } else {
      frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
    }

    done.Bind();
    frame_->EmitPush(r0);

  } else {
    Register scratch = VirtualFrame::scratch0();
    TypeInfo info = type_info(slot);
    frame_->EmitPush(SlotOperand(slot, scratch), info);

    if (slot->var()->mode() == Variable::CONST) {
      // Const slots may contain 'the hole' value (the constant hasn't been
      // initialized yet), which needs to be converted into the 'undefined'
      // value.
      Comment cmnt(masm_, "[ Unhole const");
      Register tos = frame_->PopToRegister();
      __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
      __ cmp(tos, ip);
      __ LoadRoot(tos, Heap::kUndefinedValueRootIndex, eq);
      frame_->EmitPush(tos);
    }
  }
}
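

// Illustrative note, not generated code: the 'unhole' step above implements
// const semantics for reads that happen before initialization, e.g.
//
//   var early = c;  // The slot for c still holds the hole; reads yield
//   const c = 42;   // undefined until this initialization has executed.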


void CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot,
                                                  TypeofState state) {
  VirtualFrame::RegisterAllocationScope scope(this);
  LoadFromSlot(slot, state);

  // Bail out quickly if we're not using lazy arguments allocation.
  if (ArgumentsMode() != LAZY_ARGUMENTS_ALLOCATION) return;

  // ... or if the slot isn't a non-parameter arguments slot.
  if (slot->type() == Slot::PARAMETER || !slot->is_arguments()) return;

  // Load the loaded value from the stack into a register, but leave it on
  // the stack.
  Register tos = frame_->Peek();

  // If the loaded value is the sentinel that indicates that we
  // haven't loaded the arguments object yet, we need to do it now.
  JumpTarget exit;
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(tos, ip);
  exit.Branch(ne);
  frame_->Drop();
  StoreArgumentsObject(false);
  exit.Bind();
}


void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
  ASSERT(slot != NULL);
  VirtualFrame::RegisterAllocationScope scope(this);
  if (slot->type() == Slot::LOOKUP) {
    ASSERT(slot->var()->is_dynamic());

    // For now, just do a runtime call.
    frame_->EmitPush(cp);
    frame_->EmitPush(Operand(slot->var()->name()));

    if (init_state == CONST_INIT) {
      // Same as the case for a normal store, but ignores the attribute
      // (e.g. READ_ONLY) of the context slot so that we can initialize
      // const properties (introduced via eval("const foo = (some
      // expr);")).  Also, uses the current function context instead of
      // the top context.
      //
      // Note that we must declare foo upon entry of eval(), via a
      // context slot declaration, but we cannot initialize it at the
      // same time, because the const declaration may be at the end of
      // the eval code (sigh...) and the const variable may have been
      // used before (where its value is 'undefined').  Thus, we can only
      // do the initialization when we actually encounter the expression
      // and when the expression operands are defined and valid, and
      // thus we need the split into 2 operations: declaration of the
      // context slot followed by initialization.
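      //
      // Illustrative note, not generated code: for
      //
      //   eval("use(foo); const foo = g();")
      //
      // the slot for foo is declared (and holds 'the hole') when the eval
      // code is entered, the early use reads undefined, and only the
      // kInitializeConstContextSlot call below stores the value of g().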
      frame_->CallRuntime(Runtime::kInitializeConstContextSlot, 3);
    } else {
      frame_->CallRuntime(Runtime::kStoreContextSlot, 3);
    }
    // Storing a variable must keep the (new) value on the expression
    // stack.  This is necessary for compiling assignment expressions.
    frame_->EmitPush(r0);

  } else {
    ASSERT(!slot->var()->is_dynamic());
    Register scratch = VirtualFrame::scratch0();
    Register scratch2 = VirtualFrame::scratch1();

    // The frame must be spilled when branching to this target.
    JumpTarget exit;

    if (init_state == CONST_INIT) {
      ASSERT(slot->var()->mode() == Variable::CONST);
      // Only the first const initialization must be executed (the slot
      // still contains 'the hole' value).  When the assignment is
      // executed, the code is identical to a normal store (see below).
      Comment cmnt(masm_, "[ Init const");
      __ ldr(scratch, SlotOperand(slot, scratch));
      __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
      __ cmp(scratch, ip);
      exit.Branch(ne);
    }

    // We must execute the store.  Storing a variable must keep the
    // (new) value on the stack.  This is necessary for compiling
    // assignment expressions.
    //
    // Note: We will reach here even with slot->var()->mode() ==
    // Variable::CONST because of const declarations which will
    // initialize consts to 'the hole' value and by doing so, end up
    // calling this code.  r2 may be loaded with the context; it is used
    // below in RecordWrite.
    Register tos = frame_->Peek();
    __ str(tos, SlotOperand(slot, scratch));
    if (slot->type() == Slot::CONTEXT) {
      // Skip the write barrier if the written value is a smi.
      __ tst(tos, Operand(kSmiTagMask));
      // We don't use tos any more after here.
      exit.Branch(eq);
      // scratch is loaded with the context when calling SlotOperand above.
      int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
      // We need an extra register.  Until we have a way to do that in the
      // virtual frame we will cheat and ask for a free TOS register.
      Register scratch3 = frame_->GetTOSRegister();
      __ RecordWrite(scratch, Operand(offset), scratch2, scratch3);
    }
    // If we definitely did not jump over the assignment, we do not need
    // to bind the exit label.  Doing so can defeat peephole
    // optimization.
    if (init_state == CONST_INIT || slot->type() == Slot::CONTEXT) {
      exit.Bind();
    }
  }
}


void CodeGenerator::LoadFromGlobalSlotCheckExtensions(Slot* slot,
                                                      TypeofState typeof_state,
                                                      JumpTarget* slow) {
  // Check that no extension objects have been created by calls to
  // eval from the current scope to the global scope.
  Register tmp = frame_->scratch0();
  Register tmp2 = frame_->scratch1();
  Register context = cp;
  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        frame_->SpillAll();
        // Check that extension is NULL.
        __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX));
        __ tst(tmp2, tmp2);
        slow->Branch(ne);
      }
      // Load the next context in the chain.
      __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
      __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
      context = tmp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    frame_->SpillAll();
    Label next, fast;
    __ Move(tmp, context);
    __ bind(&next);
    // Terminate at the global context.
    __ ldr(tmp2, FieldMemOperand(tmp, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
    __ cmp(tmp2, ip);
    __ b(eq, &fast);
    // Check that extension is NULL.
    __ ldr(tmp2, ContextOperand(tmp, Context::EXTENSION_INDEX));
    __ tst(tmp2, tmp2);
    slow->Branch(ne);
    // Load the next context in the chain.
    __ ldr(tmp, ContextOperand(tmp, Context::CLOSURE_INDEX));
    __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
    __ b(&next);
    __ bind(&fast);
  }

  // Load the global object.
  LoadGlobal();
  // Set up the name register and call the load IC.
  frame_->CallLoadIC(slot->var()->name(),
                     typeof_state == INSIDE_TYPEOF
                     ? RelocInfo::CODE_TARGET
                     : RelocInfo::CODE_TARGET_CONTEXT);
}
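

// Illustrative note, not generated code: a context extension object is what
// this check guards against, e.g.
//
//   function f() {
//     eval("var x = 1;");  // May install x on an extension object.
//     return x;            // Cannot blindly be compiled as a global load.
//   }
//
// The loops above walk the context chain and bail out to the slow path as
// soon as any intervening context has a non-NULL extension.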


void CodeGenerator::EmitDynamicLoadFromSlotFastCase(Slot* slot,
                                                    TypeofState typeof_state,
                                                    JumpTarget* slow,
                                                    JumpTarget* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
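  //
  // Illustrative note, not generated code: for
  //
  //   function f(a) { eval(s); return a; }
  //
  // the load of 'a' can use the fast cases below, guarded by the
  // context-extension checks; only if the eval actually introduced a
  // shadowing 'a' does the guard fail and the generic runtime lookup run.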
  if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
    LoadFromGlobalSlotCheckExtensions(slot, typeof_state, slow);
    frame_->SpillAll();
    done->Jump();

  } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
    frame_->SpillAll();
    Slot* potential_slot = slot->var()->local_if_not_shadowed()->slot();
    Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
    if (potential_slot != NULL) {
      // Generate fast case for locals that rewrite to slots.
      __ ldr(r0,
             ContextSlotOperandCheckExtensions(potential_slot,
                                               r1,
                                               r2,
                                               slow));
      if (potential_slot->var()->mode() == Variable::CONST) {
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ cmp(r0, ip);
        __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
      }
      done->Jump();
    } else if (rewrite != NULL) {
      // Generate fast case for argument loads.
      Property* property = rewrite->AsProperty();
      if (property != NULL) {
        VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
        Literal* key_literal = property->key()->AsLiteral();
        if (obj_proxy != NULL &&
            key_literal != NULL &&
            obj_proxy->IsArguments() &&
            key_literal->handle()->IsSmi()) {
          // Load the arguments object if there are no eval-introduced
          // variables.  Then load the argument from the arguments
          // object using a keyed load.
          __ ldr(r0,
                 ContextSlotOperandCheckExtensions(obj_proxy->var()->slot(),
                                                   r1,
                                                   r2,
                                                   slow));
          frame_->EmitPush(r0);
          __ mov(r1, Operand(key_literal->handle()));
          frame_->EmitPush(r1);
          EmitKeyedLoad();
          done->Jump();
        }
      }
    }
  }
}


void CodeGenerator::VisitSlot(Slot* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Slot");
  LoadFromSlotCheckForArguments(node, NOT_INSIDE_TYPEOF);
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitVariableProxy(VariableProxy* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ VariableProxy");

  Variable* var = node->var();
  Expression* expr = var->rewrite();
  if (expr != NULL) {
    Visit(expr);
  } else {
    ASSERT(var->is_global());
    Reference ref(this, node);
    ref.GetValue();
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitLiteral(Literal* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Literal");
  Register reg = frame_->GetTOSRegister();
  bool is_smi = node->handle()->IsSmi();
  __ mov(reg, Operand(node->handle()));
  frame_->EmitPush(reg, is_smi ? TypeInfo::Smi() : TypeInfo::Unknown());
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ RegExp Literal");

  Register tmp = VirtualFrame::scratch0();
  // Free up a TOS register that can be used to push the literal.
  Register literal = frame_->GetTOSRegister();

  // Retrieve the literals array and check the allocated entry.

  // Load the function of this activation.
  __ ldr(tmp, frame_->Function());

  // Load the literals array of the function.
  __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kLiteralsOffset));

  // Load the literal at the AST-saved index.
  int literal_offset =
      FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
  __ ldr(literal, FieldMemOperand(tmp, literal_offset));

  JumpTarget materialized;
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(literal, ip);
  // This branch locks the virtual frame at the materialized label to match
  // the one we have here, where the literal register is not on the stack
  // and nothing is spilled.
  materialized.Branch(ne);

  // If the entry is undefined, we call the runtime system to compute
  // the literal.
  // literal array (0)
  frame_->EmitPush(tmp);
  // literal index (1)
  frame_->EmitPush(Operand(Smi::FromInt(node->literal_index())));
  // RegExp pattern (2)
  frame_->EmitPush(Operand(node->pattern()));
  // RegExp flags (3)
  frame_->EmitPush(Operand(node->flags()));
  frame_->CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ Move(literal, r0);

  materialized.Bind();

  frame_->EmitPush(literal);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  frame_->EmitPush(Operand(Smi::FromInt(size)));
  frame_->CallRuntime(Runtime::kAllocateInNewSpace, 1);
  // TODO(lrn): Use the AllocateInNewSpace macro with a fallback to the
  // runtime.
  // r0 is the newly allocated space.

  // Reuse the literal variable with (possibly) a new register, still
  // holding the materialized boilerplate.
  literal = frame_->PopToRegister(r0);

  __ CopyFields(r0, literal, tmp.bit(), size / kPointerSize);

  // Push the clone.
  frame_->EmitPush(r0);
  ASSERT_EQ(original_height + 1, frame_->height());
}
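

// Illustrative note, not generated code: regexp literals are materialized
// once per literal site and then cloned per evaluation, so in
//
//   function f() { return /ab+c/i; }
//
// the first call to f() pays for Runtime::kMaterializeRegExpLiteral, while
// later calls find the boilerplate in the literals array and only copy it.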


void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ ObjectLiteral");

  Register literal = frame_->GetTOSRegister();
  // Load the function of this activation.
  __ ldr(literal, frame_->Function());
  // Literals array.
  __ ldr(literal, FieldMemOperand(literal, JSFunction::kLiteralsOffset));
  frame_->EmitPush(literal);
  // Literal index.
  frame_->EmitPush(Operand(Smi::FromInt(node->literal_index())));
  // Constant properties.
  frame_->EmitPush(Operand(node->constant_properties()));
  // Should the object literal have fast elements?
  frame_->EmitPush(Operand(Smi::FromInt(node->fast_elements() ? 1 : 0)));
  if (node->depth() > 1) {
    frame_->CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    frame_->CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
  }
  frame_->EmitPush(r0);  // Save the result.
  for (int i = 0; i < node->properties()->length(); i++) {
    // At the start of each iteration, the top of stack contains
    // the newly created object literal.
    ObjectLiteral::Property* property = node->properties()->at(i);
    Literal* key = property->key();
    Expression* value = property->value();
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        break;
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        if (CompileTimeValue::IsCompileTimeValue(property->value())) break;
        // else fall through
      case ObjectLiteral::Property::COMPUTED:
        if (key->handle()->IsSymbol()) {
          Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
          Load(value);
          frame_->PopToR0();
          // Fetch the object literal.
          frame_->SpillAllButCopyTOSToR1();
          __ mov(r2, Operand(key->handle()));
          frame_->CallCodeObject(ic, RelocInfo::CODE_TARGET, 0);
          break;
        }
        // else fall through
      case ObjectLiteral::Property::PROTOTYPE: {
        frame_->Dup();
        Load(key);
        Load(value);
        frame_->CallRuntime(Runtime::kSetProperty, 3);
        break;
      }
      case ObjectLiteral::Property::SETTER: {
        frame_->Dup();
        Load(key);
        frame_->EmitPush(Operand(Smi::FromInt(1)));
        Load(value);
        frame_->CallRuntime(Runtime::kDefineAccessor, 4);
        break;
      }
      case ObjectLiteral::Property::GETTER: {
        frame_->Dup();
        Load(key);
        frame_->EmitPush(Operand(Smi::FromInt(0)));
        Load(value);
        frame_->CallRuntime(Runtime::kDefineAccessor, 4);
        break;
      }
    }
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}
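

// Illustrative note, not generated code: the property kinds above map onto
// object literal syntax roughly as follows:
//
//   var o = { a: 1,           // CONSTANT: already set in the boilerplate.
//             b: x + 1,       // COMPUTED, symbol key: store IC.
//             1: x,           // COMPUTED, non-symbol key: kSetProperty.
//             get c() {} };   // GETTER: kDefineAccessor with flag 0.
//
// Only the non-constant properties need code emitted by the loop above.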


void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ ArrayLiteral");

  Register tos = frame_->GetTOSRegister();
  // Load the function of this activation.
  __ ldr(tos, frame_->Function());
  // Load the literals array of the function.
  __ ldr(tos, FieldMemOperand(tos, JSFunction::kLiteralsOffset));
  frame_->EmitPush(tos);
  frame_->EmitPush(Operand(Smi::FromInt(node->literal_index())));
  frame_->EmitPush(Operand(node->constant_elements()));
  int length = node->values()->length();
  if (node->constant_elements()->map() == Heap::fixed_cow_array_map()) {
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
    frame_->CallStub(&stub, 3);
    __ IncrementCounter(&Counters::cow_arrays_created_stub, 1, r1, r2);
  } else if (node->depth() > 1) {
    frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
  } else {
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
    frame_->CallStub(&stub, 3);
  }
  frame_->EmitPush(r0);  // Save the result.
  // r0: created array literal

  // Generate code to set the elements in the array that are not
  // literals.
  for (int i = 0; i < node->values()->length(); i++) {
    Expression* value = node->values()->at(i);

    // If the value is a literal, the property value is already set in the
    // boilerplate object.
    if (value->AsLiteral() != NULL) continue;
    // If the value is a materialized literal, the property value is already
    // set in the boilerplate object if it is simple.
    if (CompileTimeValue::IsCompileTimeValue(value)) continue;

    // The property must be set by generated code.
    Load(value);
    frame_->PopToR0();
    // Fetch the object literal.
    frame_->SpillAllButCopyTOSToR1();

    // Get the elements array.
    __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));

    // Write to the indexed properties array.
    int offset = i * kPointerSize + FixedArray::kHeaderSize;
    __ str(r0, FieldMemOperand(r1, offset));

    // Update the write barrier for the array address.
    __ RecordWrite(r1, Operand(offset), r3, r2);
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}
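

// Illustrative note, not generated code: for an array literal whose elements
// are all compile-time constants, e.g.
//
//   var a = [1, 2, 3];
//
// the boilerplate elements can be shared copy-on-write (the
// COPY_ON_WRITE_ELEMENTS stub above), so no per-element stores are needed.
// A literal such as [x, 2] instead requires the explicit store and write
// barrier emitted by the loop above for its non-constant element.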


void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  // Call the runtime routine to allocate the catch extension object and
  // assign the exception value to the catch variable.
  Comment cmnt(masm_, "[ CatchExtensionObject");
  Load(node->key());
  Load(node->value());
  frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2);
  frame_->EmitPush(r0);
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::EmitSlotAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm(), "[ Variable Assignment");
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  ASSERT(var != NULL);
  Slot* slot = var->slot();
  ASSERT(slot != NULL);

  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
    LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);

    // Perform the binary operation.
    Literal* literal = node->value()->AsLiteral();
    bool overwrite_value = node->value()->ResultOverwriteAllowed();
    if (literal != NULL && literal->handle()->IsSmi()) {
      SmiOperation(node->binary_op(),
                   literal->handle(),
                   false,
                   overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
    } else {
      GenerateInlineSmi inline_smi =
          loop_nesting() > 0 ? GENERATE_INLINE_SMI : DONT_GENERATE_INLINE_SMI;
      if (literal != NULL) {
        ASSERT(!literal->handle()->IsSmi());
        inline_smi = DONT_GENERATE_INLINE_SMI;
      }
      Load(node->value());
      GenericBinaryOperation(node->binary_op(),
                             overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE,
                             inline_smi);
    }
  } else {
    Load(node->value());
  }
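
  // Illustrative note, not generated code: for a compound assignment such as
  // 'x += 5' the right-hand side is a smi literal, so SmiOperation above
  // emits the specialized code; for 'x += y' the generic binary operation is
  // used, with inline smi code enabled inside loops.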
3770
3771 // Perform the assignment.
3772 if (var->mode() != Variable::CONST || node->op() == Token::INIT_CONST) {
3773 CodeForSourcePosition(node->position());
3774 StoreToSlot(slot,
3775 node->op() == Token::INIT_CONST ? CONST_INIT : NOT_CONST_INIT);
3776 }
3777 ASSERT_EQ(original_height + 1, frame_->height());
3778}
3779
3780
3781void CodeGenerator::EmitNamedPropertyAssignment(Assignment* node) {
3782#ifdef DEBUG
3783 int original_height = frame_->height();
3784#endif
3785 Comment cmnt(masm(), "[ Named Property Assignment");
3786 Variable* var = node->target()->AsVariableProxy()->AsVariable();
3787 Property* prop = node->target()->AsProperty();
3788 ASSERT(var == NULL || (prop == NULL && var->is_global()));
3789
3790 // Initialize name and evaluate the receiver sub-expression if necessary. If
3791 // the receiver is trivial it is not placed on the stack at this point, but
3792 // loaded whenever actually needed.
3793 Handle<String> name;
3794 bool is_trivial_receiver = false;
3795 if (var != NULL) {
3796 name = var->name();
3797 } else {
3798 Literal* lit = prop->key()->AsLiteral();
3799 ASSERT_NOT_NULL(lit);
3800 name = Handle<String>::cast(lit->handle());
3801 // Do not materialize the receiver on the frame if it is trivial.
3802 is_trivial_receiver = prop->obj()->IsTrivial();
3803 if (!is_trivial_receiver) Load(prop->obj());
3804 }
3805
3806 // Change to slow case in the beginning of an initialization block to
3807 // avoid the quadratic behavior of repeatedly adding fast properties.
3808 if (node->starts_initialization_block()) {
3809 // Initialization block consists of assignments of the form expr.x = ..., so
3810 // this will never be an assignment to a variable, so there must be a
3811 // receiver object.
3812 ASSERT_EQ(NULL, var);
3813 if (is_trivial_receiver) {
3814 Load(prop->obj());
3815 } else {
3816 frame_->Dup();
3817 }
3818 frame_->CallRuntime(Runtime::kToSlowProperties, 1);
3819 }
3820
3821 // Change to fast case at the end of an initialization block. To prepare for
3822 // that add an extra copy of the receiver to the frame, so that it can be
3823 // converted back to fast case after the assignment.
3824 if (node->ends_initialization_block() && !is_trivial_receiver) {
3825 frame_->Dup();
3826 }
3827
3828 // Stack layout:
3829 // [tos] : receiver (only materialized if non-trivial)
3830 // [tos+1] : receiver if at the end of an initialization block
3831
3832 // Evaluate the right-hand side.
3833 if (node->is_compound()) {
3834 // For a compound assignment the right-hand side is a binary operation
3835 // between the current property value and the actual right-hand side.
3836 if (is_trivial_receiver) {
3837 Load(prop->obj());
3838 } else if (var != NULL) {
3839 LoadGlobal();
3840 } else {
3841 frame_->Dup();
3842 }
3843 EmitNamedLoad(name, var != NULL);
Steve Block6ded16b2010-05-10 14:33:55 +01003844
3845 // Perform the binary operation.
3846 Literal* literal = node->value()->AsLiteral();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003847 bool overwrite_value = node->value()->ResultOverwriteAllowed();
Steve Block6ded16b2010-05-10 14:33:55 +01003848 if (literal != NULL && literal->handle()->IsSmi()) {
3849 SmiOperation(node->binary_op(),
3850 literal->handle(),
3851 false,
3852 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
3853 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003854 GenerateInlineSmi inline_smi =
3855 loop_nesting() > 0 ? GENERATE_INLINE_SMI : DONT_GENERATE_INLINE_SMI;
3856 if (literal != NULL) {
3857 ASSERT(!literal->handle()->IsSmi());
3858 inline_smi = DONT_GENERATE_INLINE_SMI;
3859 }
Steve Block6ded16b2010-05-10 14:33:55 +01003860 Load(node->value());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003861 GenericBinaryOperation(node->binary_op(),
3862 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE,
3863 inline_smi);
Steve Block6ded16b2010-05-10 14:33:55 +01003864 }
3865 } else {
3866 // For non-compound assignment just load the right-hand side.
3867 Load(node->value());
3868 }
3869
3870 // Stack layout:
3871 // [tos] : value
3872 // [tos+1] : receiver (only materialized if non-trivial)
3873 // [tos+2] : receiver if at the end of an initialization block
3874
3875 // Perform the assignment. It is safe to ignore constants here.
3876 ASSERT(var == NULL || var->mode() != Variable::CONST);
3877 ASSERT_NE(Token::INIT_CONST, node->op());
3878 if (is_trivial_receiver) {
3879 // Load the receiver and swap with the value.
3880 Load(prop->obj());
3881 Register t0 = frame_->PopToRegister();
3882 Register t1 = frame_->PopToRegister(t0);
3883 frame_->EmitPush(t0);
3884 frame_->EmitPush(t1);
3885 }
3886 CodeForSourcePosition(node->position());
3887 bool is_contextual = (var != NULL);
3888 EmitNamedStore(name, is_contextual);
3889 frame_->EmitPush(r0);
3890
3891 // Change to fast case at the end of an initialization block.
3892 if (node->ends_initialization_block()) {
3893 ASSERT_EQ(NULL, var);
3894 // The argument to the runtime call is the receiver.
3895 if (is_trivial_receiver) {
3896 Load(prop->obj());
3897 } else {
3898 // A copy of the receiver is below the value of the assignment. Swap
3899 // the receiver and the value of the assignment expression.
3900 Register t0 = frame_->PopToRegister();
3901 Register t1 = frame_->PopToRegister(t0);
3902 frame_->EmitPush(t0);
3903 frame_->EmitPush(t1);
3904 }
3905 frame_->CallRuntime(Runtime::kToFastProperties, 1);
3906 }
3907
3908 // Stack layout:
3909 // [tos] : result
3910
3911 ASSERT_EQ(original_height + 1, frame_->height());
3912}
3913
3914
3915void CodeGenerator::EmitKeyedPropertyAssignment(Assignment* node) {
3916#ifdef DEBUG
3917 int original_height = frame_->height();
3918#endif
3919 Comment cmnt(masm_, "[ Keyed Property Assignment");
3920 Property* prop = node->target()->AsProperty();
3921 ASSERT_NOT_NULL(prop);
3922
3923 // Evaluate the receiver subexpression.
3924 Load(prop->obj());
3925
Steve Block8defd9f2010-07-08 12:39:36 +01003926 WriteBarrierCharacter wb_info;
3927
Steve Block6ded16b2010-05-10 14:33:55 +01003928 // Change to slow case at the beginning of an initialization block to
3929 // avoid the quadratic behavior of repeatedly adding fast properties.
3930 if (node->starts_initialization_block()) {
3931 frame_->Dup();
3932 frame_->CallRuntime(Runtime::kToSlowProperties, 1);
3933 }
3934
3935 // Change to fast case at the end of an initialization block. To prepare for
3936 // that, add an extra copy of the receiver to the frame, so that it can be
3937 // converted back to fast case after the assignment.
3938 if (node->ends_initialization_block()) {
3939 frame_->Dup();
3940 }
3941
3942 // Evaluate the key subexpression.
3943 Load(prop->key());
3944
3945 // Stack layout:
3946 // [tos] : key
3947 // [tos+1] : receiver
3948 // [tos+2] : receiver if at the end of an initialization block
Steve Block8defd9f2010-07-08 12:39:36 +01003949 //
Steve Block6ded16b2010-05-10 14:33:55 +01003950 // Evaluate the right-hand side.
3951 if (node->is_compound()) {
3952 // For a compound assignment the right-hand side is a binary operation
3953 // between the current property value and the actual right-hand side.
Kristian Monsen25f61362010-05-21 11:50:48 +01003954 // Duplicate receiver and key for loading the current property value.
3955 frame_->Dup2();
Steve Block6ded16b2010-05-10 14:33:55 +01003956 EmitKeyedLoad();
3957 frame_->EmitPush(r0);
3958
3959 // Perform the binary operation.
3960 Literal* literal = node->value()->AsLiteral();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003961 bool overwrite_value = node->value()->ResultOverwriteAllowed();
Steve Block6ded16b2010-05-10 14:33:55 +01003962 if (literal != NULL && literal->handle()->IsSmi()) {
3963 SmiOperation(node->binary_op(),
3964 literal->handle(),
3965 false,
3966 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
3967 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003968 GenerateInlineSmi inline_smi =
3969 loop_nesting() > 0 ? GENERATE_INLINE_SMI : DONT_GENERATE_INLINE_SMI;
3970 if (literal != NULL) {
3971 ASSERT(!literal->handle()->IsSmi());
3972 inline_smi = DONT_GENERATE_INLINE_SMI;
3973 }
Steve Block6ded16b2010-05-10 14:33:55 +01003974 Load(node->value());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003975 GenericBinaryOperation(node->binary_op(),
3976 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE,
3977 inline_smi);
Steve Block6ded16b2010-05-10 14:33:55 +01003978 }
Steve Block8defd9f2010-07-08 12:39:36 +01003979 wb_info = node->type()->IsLikelySmi() ? LIKELY_SMI : UNLIKELY_SMI;
Steve Block6ded16b2010-05-10 14:33:55 +01003980 } else {
3981 // For non-compound assignment just load the right-hand side.
3982 Load(node->value());
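// Choose a write-barrier hint for the store: literal values never need a
// new-space write barrier, otherwise guess from the recorded type whether
// the value is likely a smi.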
Steve Block8defd9f2010-07-08 12:39:36 +01003983 wb_info = node->value()->AsLiteral() != NULL ?
3984 NEVER_NEWSPACE :
3985 (node->value()->type()->IsLikelySmi() ? LIKELY_SMI : UNLIKELY_SMI);
Steve Block6ded16b2010-05-10 14:33:55 +01003986 }
3987
3988 // Stack layout:
3989 // [tos] : value
3990 // [tos+1] : key
3991 // [tos+2] : receiver
3992 // [tos+3] : receiver if at the end of an initialization block
3993
3994 // Perform the assignment. It is safe to ignore constants here.
3995 ASSERT(node->op() != Token::INIT_CONST);
3996 CodeForSourcePosition(node->position());
Steve Block8defd9f2010-07-08 12:39:36 +01003997 EmitKeyedStore(prop->key()->type(), wb_info);
Steve Block6ded16b2010-05-10 14:33:55 +01003998 frame_->EmitPush(r0);
3999
4000 // Stack layout:
4001 // [tos] : result
4002 // [tos+1] : receiver if at the end of an initialization block
4003
4004 // Change to fast case at the end of an initialization block.
4005 if (node->ends_initialization_block()) {
4006 // The argument to the runtime call is the extra copy of the receiver,
4007 // which is below the value of the assignment. Swap the receiver and
4008 // the value of the assignment expression.
4009 Register t0 = frame_->PopToRegister();
4010 Register t1 = frame_->PopToRegister(t0);
4011 frame_->EmitPush(t1);
4012 frame_->EmitPush(t0);
4013 frame_->CallRuntime(Runtime::kToFastProperties, 1);
4014 }
4015
4016 // Stack layout:
4017 // [tos] : result
4018
4019 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00004020}
4021
4022
4023void CodeGenerator::VisitAssignment(Assignment* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01004024 VirtualFrame::RegisterAllocationScope scope(this);
Steve Blocka7e24c12009-10-30 11:49:00 +00004025#ifdef DEBUG
4026 int original_height = frame_->height();
4027#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00004028 Comment cmnt(masm_, "[ Assignment");
4029
Steve Block6ded16b2010-05-10 14:33:55 +01004030 Variable* var = node->target()->AsVariableProxy()->AsVariable();
4031 Property* prop = node->target()->AsProperty();
Steve Blocka7e24c12009-10-30 11:49:00 +00004032
Steve Block6ded16b2010-05-10 14:33:55 +01004033 if (var != NULL && !var->is_global()) {
4034 EmitSlotAssignment(node);
Steve Blocka7e24c12009-10-30 11:49:00 +00004035
Steve Block6ded16b2010-05-10 14:33:55 +01004036 } else if ((prop != NULL && prop->key()->IsPropertyName()) ||
4037 (var != NULL && var->is_global())) {
4038 // Properties whose keys are property names and global variables are
4039 // treated as named property references. We do not need to consider
4040 // global 'this' because it is not a valid left-hand side.
4041 EmitNamedPropertyAssignment(node);
Steve Blocka7e24c12009-10-30 11:49:00 +00004042
Steve Block6ded16b2010-05-10 14:33:55 +01004043 } else if (prop != NULL) {
4044 // Other properties (including rewritten parameters for a function that
4045 // uses arguments) are keyed property assignments.
4046 EmitKeyedPropertyAssignment(node);
4047
4048 } else {
4049 // Invalid left-hand side.
4050 Load(node->target());
4051 frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
4052 // The runtime call doesn't actually return but the code generator will
4053 // still generate code and expects a certain frame height.
4054 frame_->EmitPush(r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00004055 }
Steve Block6ded16b2010-05-10 14:33:55 +01004056 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00004057}
4058
4059
4060void CodeGenerator::VisitThrow(Throw* node) {
4061#ifdef DEBUG
4062 int original_height = frame_->height();
4063#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00004064 Comment cmnt(masm_, "[ Throw");
4065
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004066 Load(node->exception());
Steve Blocka7e24c12009-10-30 11:49:00 +00004067 CodeForSourcePosition(node->position());
4068 frame_->CallRuntime(Runtime::kThrow, 1);
4069 frame_->EmitPush(r0);
Steve Block6ded16b2010-05-10 14:33:55 +01004070 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00004071}
4072
4073
4074void CodeGenerator::VisitProperty(Property* node) {
4075#ifdef DEBUG
4076 int original_height = frame_->height();
4077#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00004078 Comment cmnt(masm_, "[ Property");
4079
4080 { Reference property(this, node);
Steve Block6ded16b2010-05-10 14:33:55 +01004081 property.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00004082 }
Steve Block6ded16b2010-05-10 14:33:55 +01004083 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00004084}
4085
4086
4087void CodeGenerator::VisitCall(Call* node) {
4088#ifdef DEBUG
4089 int original_height = frame_->height();
4090#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00004091 Comment cmnt(masm_, "[ Call");
4092
4093 Expression* function = node->expression();
4094 ZoneList<Expression*>* args = node->arguments();
4095
4096 // Standard function call.
4097 // Check if the function is a variable or a property.
4098 Variable* var = function->AsVariableProxy()->AsVariable();
4099 Property* property = function->AsProperty();
4100
4101 // ------------------------------------------------------------------------
4102 // Fast-case: Use inline caching.
4103 // ---
4104 // According to ECMA-262, section 11.2.3, page 44, the function to call
4105 // must be resolved after the arguments have been evaluated. The IC code
4106 // automatically handles this by loading the arguments before the function
4107 // is resolved in cache misses (this also holds for megamorphic calls).
4108 // ------------------------------------------------------------------------
4109
4110 if (var != NULL && var->is_possibly_eval()) {
4111 // ----------------------------------
4112 // JavaScript example: 'eval(arg)' // eval is not known to be shadowed
4113 // ----------------------------------
4114
4115 // In a call to eval, we first call %ResolvePossiblyDirectEval to
4116 // resolve the function we need to call and the receiver of the
4117 // call. Then we call the resolved function using the given
4118 // arguments.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004119
Steve Blocka7e24c12009-10-30 11:49:00 +00004120 // Prepare stack for call to resolved function.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004121 Load(function);
4122
4123 // Allocate a frame slot for the receiver.
Steve Block8defd9f2010-07-08 12:39:36 +01004124 frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004125
4126 // Load the arguments.
Steve Blocka7e24c12009-10-30 11:49:00 +00004127 int arg_count = args->length();
4128 for (int i = 0; i < arg_count; i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004129 Load(args->at(i));
Steve Blocka7e24c12009-10-30 11:49:00 +00004130 }
4131
Steve Block8defd9f2010-07-08 12:39:36 +01004132 VirtualFrame::SpilledScope spilled_scope(frame_);
4133
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004134 // If we know that eval can only be shadowed by eval-introduced
4135 // variables we attempt to load the global eval function directly
4136 // in generated code. If we succeed, there is no need to perform a
4137 // context lookup in the runtime system.
4138 JumpTarget done;
4139 if (var->slot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
4140 ASSERT(var->slot()->type() == Slot::LOOKUP);
4141 JumpTarget slow;
4142 // Prepare the stack for the call to
4143 // ResolvePossiblyDirectEvalNoLookup by pushing the loaded
4144 // function, the first argument to the eval call and the
4145 // receiver.
4146 LoadFromGlobalSlotCheckExtensions(var->slot(),
4147 NOT_INSIDE_TYPEOF,
4148 &slow);
4149 frame_->EmitPush(r0);
4150 if (arg_count > 0) {
4151 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
4152 frame_->EmitPush(r1);
4153 } else {
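// No arguments: there is no first argument to pass to eval, so push an
// arbitrary register value (r2) as a dummy to keep the fixed
// three-argument layout for the runtime call.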
4154 frame_->EmitPush(r2);
4155 }
4156 __ ldr(r1, frame_->Receiver());
4157 frame_->EmitPush(r1);
4158
4159 frame_->CallRuntime(Runtime::kResolvePossiblyDirectEvalNoLookup, 3);
4160
4161 done.Jump();
4162 slow.Bind();
4163 }
4164
4165 // Prepare the stack for the call to ResolvePossiblyDirectEval by
4166 // pushing the loaded function, the first argument to the eval
4167 // call and the receiver.
Steve Blocka7e24c12009-10-30 11:49:00 +00004168 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize + kPointerSize));
4169 frame_->EmitPush(r1);
4170 if (arg_count > 0) {
4171 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
4172 frame_->EmitPush(r1);
4173 } else {
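// As above, push a dummy value when the eval call has no arguments.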
4174 frame_->EmitPush(r2);
4175 }
Leon Clarkee46be812010-01-19 14:06:41 +00004176 __ ldr(r1, frame_->Receiver());
4177 frame_->EmitPush(r1);
4178
Steve Blocka7e24c12009-10-30 11:49:00 +00004179 // Resolve the call.
Leon Clarkee46be812010-01-19 14:06:41 +00004180 frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 3);
Steve Blocka7e24c12009-10-30 11:49:00 +00004181
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004182 // If we generated fast-case code, bind the jump target where the fast
4183 // and slow cases merge.
4184 if (done.is_linked()) done.Bind();
4185
Steve Blocka7e24c12009-10-30 11:49:00 +00004186 // Touch up stack with the right values for the function and the receiver.
Leon Clarkee46be812010-01-19 14:06:41 +00004187 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
Steve Blocka7e24c12009-10-30 11:49:00 +00004188 __ str(r1, MemOperand(sp, arg_count * kPointerSize));
4189
4190 // Call the function.
4191 CodeForSourcePosition(node->position());
4192
4193 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Leon Clarkee46be812010-01-19 14:06:41 +00004194 CallFunctionStub call_function(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
Steve Blocka7e24c12009-10-30 11:49:00 +00004195 frame_->CallStub(&call_function, arg_count + 1);
4196
4197 __ ldr(cp, frame_->Context());
4198 // Remove the function from the stack.
4199 frame_->Drop();
4200 frame_->EmitPush(r0);
4201
4202 } else if (var != NULL && !var->is_this() && var->is_global()) {
4203 // ----------------------------------
4204 // JavaScript example: 'foo(1, 2, 3)' // foo is global
4205 // ----------------------------------
Steve Blocka7e24c12009-10-30 11:49:00 +00004206 // Pass the global object as the receiver and let the IC stub
4207 // patch the stack to use the global proxy as 'this' in the
4208 // invoked function.
4209 LoadGlobal();
4210
4211 // Load the arguments.
4212 int arg_count = args->length();
4213 for (int i = 0; i < arg_count; i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004214 Load(args->at(i));
Steve Blocka7e24c12009-10-30 11:49:00 +00004215 }
4216
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004217 VirtualFrame::SpilledScope spilled_scope(frame_);
Andrei Popescu402d9372010-02-26 13:31:12 +00004218 // Set up the name register and call the IC initialization code.
4219 __ mov(r2, Operand(var->name()));
Steve Blocka7e24c12009-10-30 11:49:00 +00004220 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
4221 Handle<Code> stub = ComputeCallInitialize(arg_count, in_loop);
4222 CodeForSourcePosition(node->position());
4223 frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET_CONTEXT,
4224 arg_count + 1);
4225 __ ldr(cp, frame_->Context());
Steve Blocka7e24c12009-10-30 11:49:00 +00004226 frame_->EmitPush(r0);
4227
4228 } else if (var != NULL && var->slot() != NULL &&
4229 var->slot()->type() == Slot::LOOKUP) {
4230 // ----------------------------------
Kristian Monsen25f61362010-05-21 11:50:48 +01004231 // JavaScript examples:
4232 //
4233 // with (obj) foo(1, 2, 3) // foo may be in obj.
4234 //
4235 // function f() {};
4236 // function g() {
4237 // eval(...);
4238 // f(); // f could be in extension object.
4239 // }
Steve Blocka7e24c12009-10-30 11:49:00 +00004240 // ----------------------------------
4241
Kristian Monsen25f61362010-05-21 11:50:48 +01004242 JumpTarget slow, done;
4243
4244 // Generate fast case for loading functions from slots that
4245 // correspond to local/global variables or arguments unless they
4246 // are shadowed by eval-introduced bindings.
4247 EmitDynamicLoadFromSlotFastCase(var->slot(),
4248 NOT_INSIDE_TYPEOF,
4249 &slow,
4250 &done);
4251
4252 slow.Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00004253 // Load the function
4254 frame_->EmitPush(cp);
Iain Merrick75681382010-08-19 15:07:18 +01004255 frame_->EmitPush(Operand(var->name()));
Steve Blocka7e24c12009-10-30 11:49:00 +00004256 frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
4257 // r0: slot value; r1: receiver
4258
4259 // Load the receiver.
4260 frame_->EmitPush(r0); // function
4261 frame_->EmitPush(r1); // receiver
4262
Kristian Monsen25f61362010-05-21 11:50:48 +01004263 // If fast case code has been generated, emit code to push the
4264 // function and receiver and have the slow path jump around this
4265 // code.
4266 if (done.is_linked()) {
4267 JumpTarget call;
4268 call.Jump();
4269 done.Bind();
4270 frame_->EmitPush(r0); // function
Iain Merrick75681382010-08-19 15:07:18 +01004271 LoadGlobalReceiver(VirtualFrame::scratch0()); // receiver
Kristian Monsen25f61362010-05-21 11:50:48 +01004272 call.Bind();
4273 }
4274
4275 // Call the function. At this point, everything is spilled but the
4276 // function and receiver are in r0 and r1.
Leon Clarkee46be812010-01-19 14:06:41 +00004277 CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
Steve Blocka7e24c12009-10-30 11:49:00 +00004278 frame_->EmitPush(r0);
4279
4280 } else if (property != NULL) {
4281 // Check if the key is a literal string.
4282 Literal* literal = property->key()->AsLiteral();
4283
4284 if (literal != NULL && literal->handle()->IsSymbol()) {
4285 // ------------------------------------------------------------------
4286 // JavaScript example: 'object.foo(1, 2, 3)' or 'map["key"](1, 2, 3)'
4287 // ------------------------------------------------------------------
4288
Steve Block6ded16b2010-05-10 14:33:55 +01004289 Handle<String> name = Handle<String>::cast(literal->handle());
Steve Blocka7e24c12009-10-30 11:49:00 +00004290
Steve Block6ded16b2010-05-10 14:33:55 +01004291 if (ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION &&
4292 name->IsEqualTo(CStrVector("apply")) &&
4293 args->length() == 2 &&
4294 args->at(1)->AsVariableProxy() != NULL &&
4295 args->at(1)->AsVariableProxy()->IsArguments()) {
4296 // Use the optimized Function.prototype.apply that avoids
4297 // allocating lazily allocated arguments objects.
4298 CallApplyLazy(property->obj(),
4299 args->at(0),
4300 args->at(1)->AsVariableProxy(),
4301 node->position());
4302
4303 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004304 Load(property->obj()); // Receiver.
Steve Block6ded16b2010-05-10 14:33:55 +01004305 // Load the arguments.
4306 int arg_count = args->length();
4307 for (int i = 0; i < arg_count; i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004308 Load(args->at(i));
Steve Block6ded16b2010-05-10 14:33:55 +01004309 }
4310
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004311 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Block6ded16b2010-05-10 14:33:55 +01004312 // Set the name register and call the IC initialization code.
4313 __ mov(r2, Operand(name));
4314 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
4315 Handle<Code> stub = ComputeCallInitialize(arg_count, in_loop);
4316 CodeForSourcePosition(node->position());
4317 frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
4318 __ ldr(cp, frame_->Context());
4319 frame_->EmitPush(r0);
4320 }
Steve Blocka7e24c12009-10-30 11:49:00 +00004321
4322 } else {
4323 // -------------------------------------------
4324 // JavaScript example: 'array[index](1, 2, 3)'
4325 // -------------------------------------------
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004326 Load(property->obj());
Steve Blocka7e24c12009-10-30 11:49:00 +00004327 if (property->is_synthetic()) {
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01004328 Load(property->key());
4329 EmitKeyedLoad();
4330 // Put the function below the receiver.
Leon Clarked91b9f72010-01-27 17:25:45 +00004331 // Use the global receiver.
Kristian Monsen25f61362010-05-21 11:50:48 +01004332 frame_->EmitPush(r0); // Function.
Iain Merrick75681382010-08-19 15:07:18 +01004333 LoadGlobalReceiver(VirtualFrame::scratch0());
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01004334 // Call the function.
4335 CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position());
4336 frame_->EmitPush(r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00004337 } else {
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01004338 // Load the arguments.
4339 int arg_count = args->length();
4340 for (int i = 0; i < arg_count; i++) {
4341 Load(args->at(i));
4342 }
Steve Blocka7e24c12009-10-30 11:49:00 +00004343
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01004344 // Set the name register and call the IC initialization code.
4345 Load(property->key());
Iain Merrick75681382010-08-19 15:07:18 +01004346 frame_->SpillAll();
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01004347 frame_->EmitPop(r2); // Function name.
4348
4349 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
4350 Handle<Code> stub = ComputeKeyedCallInitialize(arg_count, in_loop);
4351 CodeForSourcePosition(node->position());
4352 frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
4353 __ ldr(cp, frame_->Context());
4354 frame_->EmitPush(r0);
4355 }
Steve Blocka7e24c12009-10-30 11:49:00 +00004356 }
4357
4358 } else {
4359 // ----------------------------------
4360 // JavaScript example: 'foo(1, 2, 3)' // foo is not global
4361 // ----------------------------------
4362
4363 // Load the function.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004364 Load(function);
4365
Steve Blocka7e24c12009-10-30 11:49:00 +00004366 // Pass the global proxy as the receiver.
Iain Merrick75681382010-08-19 15:07:18 +01004367 LoadGlobalReceiver(VirtualFrame::scratch0());
Steve Blocka7e24c12009-10-30 11:49:00 +00004368
4369 // Call the function.
Leon Clarkee46be812010-01-19 14:06:41 +00004370 CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
Steve Blocka7e24c12009-10-30 11:49:00 +00004371 frame_->EmitPush(r0);
4372 }
Steve Block6ded16b2010-05-10 14:33:55 +01004373 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00004374}
4375
4376
4377void CodeGenerator::VisitCallNew(CallNew* node) {
4378#ifdef DEBUG
4379 int original_height = frame_->height();
4380#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00004381 Comment cmnt(masm_, "[ CallNew");
4382
4383 // According to ECMA-262, section 11.2.2, page 44, the function
4384 // expression in new calls must be evaluated before the
4385 // arguments. This is different from ordinary calls, where the
4386 // actual function to call is resolved after the arguments have been
4387 // evaluated.
4388
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004389 // Push constructor on the stack. If it's not a function it's used as
4390 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
4391 // ignored.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004392 Load(node->expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00004393
4394 // Push the arguments ("left-to-right") on the stack.
4395 ZoneList<Expression*>* args = node->arguments();
4396 int arg_count = args->length();
4397 for (int i = 0; i < arg_count; i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004398 Load(args->at(i));
Steve Blocka7e24c12009-10-30 11:49:00 +00004399 }
4400
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004401 // Spill everything from here to simplify the implementation.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004402 VirtualFrame::SpilledScope spilled_scope(frame_);
4403
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004404 // Load the argument count into r0 and the function into r1 as per
4405 // calling convention.
Steve Blocka7e24c12009-10-30 11:49:00 +00004406 __ mov(r0, Operand(arg_count));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004407 __ ldr(r1, frame_->ElementAt(arg_count));
Steve Blocka7e24c12009-10-30 11:49:00 +00004408
4409 // Call the construct call builtin that handles allocation and
4410 // constructor invocation.
4411 CodeForSourcePosition(node->position());
4412 Handle<Code> ic(Builtins::builtin(Builtins::JSConstructCall));
Leon Clarke4515c472010-02-03 11:58:03 +00004413 frame_->CallCodeObject(ic, RelocInfo::CONSTRUCT_CALL, arg_count + 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004414 frame_->EmitPush(r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00004415
Steve Block6ded16b2010-05-10 14:33:55 +01004416 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00004417}
4418
4419
4420void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
Iain Merrick75681382010-08-19 15:07:18 +01004421 Register scratch = VirtualFrame::scratch0();
4422 JumpTarget null, function, leave, non_function_constructor;
Steve Blocka7e24c12009-10-30 11:49:00 +00004423
Iain Merrick75681382010-08-19 15:07:18 +01004424 // Load the object into a register.
4425 ASSERT(args->length() == 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004426 Load(args->at(0));
Iain Merrick75681382010-08-19 15:07:18 +01004427 Register tos = frame_->PopToRegister();
Steve Blocka7e24c12009-10-30 11:49:00 +00004428
4429 // If the object is a smi, we return null.
Iain Merrick75681382010-08-19 15:07:18 +01004430 __ tst(tos, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00004431 null.Branch(eq);
4432
4433 // Check that the object is a JS object but take special care of JS
4434 // functions to make sure they have 'Function' as their class.
Iain Merrick75681382010-08-19 15:07:18 +01004435 __ CompareObjectType(tos, tos, scratch, FIRST_JS_OBJECT_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00004436 null.Branch(lt);
4437
4438 // As long as JS_FUNCTION_TYPE is the last instance type and it is
4439 // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
4440 // LAST_JS_OBJECT_TYPE.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01004441 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
4442 STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
Iain Merrick75681382010-08-19 15:07:18 +01004443 __ cmp(scratch, Operand(JS_FUNCTION_TYPE));
Steve Blocka7e24c12009-10-30 11:49:00 +00004444 function.Branch(eq);
4445
4446 // Check if the constructor in the map is a function.
Iain Merrick75681382010-08-19 15:07:18 +01004447 __ ldr(tos, FieldMemOperand(tos, Map::kConstructorOffset));
4448 __ CompareObjectType(tos, scratch, scratch, JS_FUNCTION_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00004449 non_function_constructor.Branch(ne);
4450
Iain Merrick75681382010-08-19 15:07:18 +01004451 // The tos register now contains the constructor function. Grab the
Steve Blocka7e24c12009-10-30 11:49:00 +00004452 // instance class name from there.
Iain Merrick75681382010-08-19 15:07:18 +01004453 __ ldr(tos, FieldMemOperand(tos, JSFunction::kSharedFunctionInfoOffset));
4454 __ ldr(tos,
4455 FieldMemOperand(tos, SharedFunctionInfo::kInstanceClassNameOffset));
4456 frame_->EmitPush(tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00004457 leave.Jump();
4458
4459 // Functions have class 'Function'.
4460 function.Bind();
Iain Merrick75681382010-08-19 15:07:18 +01004461 __ mov(tos, Operand(Factory::function_class_symbol()));
4462 frame_->EmitPush(tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00004463 leave.Jump();
4464
4465 // Objects with a non-function constructor have class 'Object'.
4466 non_function_constructor.Bind();
Iain Merrick75681382010-08-19 15:07:18 +01004467 __ mov(tos, Operand(Factory::Object_symbol()));
4468 frame_->EmitPush(tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00004469 leave.Jump();
4470
4471 // Non-JS objects have class null.
4472 null.Bind();
Iain Merrick75681382010-08-19 15:07:18 +01004473 __ LoadRoot(tos, Heap::kNullValueRootIndex);
4474 frame_->EmitPush(tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00004475
4476 // All done.
4477 leave.Bind();
4478}
4479
4480
4481void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
Iain Merrick75681382010-08-19 15:07:18 +01004482 Register scratch = VirtualFrame::scratch0();
Steve Blocka7e24c12009-10-30 11:49:00 +00004483 JumpTarget leave;
Iain Merrick75681382010-08-19 15:07:18 +01004484
4485 ASSERT(args->length() == 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004486 Load(args->at(0));
Iain Merrick75681382010-08-19 15:07:18 +01004487 Register tos = frame_->PopToRegister(); // tos contains object.
Steve Blocka7e24c12009-10-30 11:49:00 +00004488 // if (object->IsSmi()) return the object.
Iain Merrick75681382010-08-19 15:07:18 +01004489 __ tst(tos, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00004490 leave.Branch(eq);
4491 // It is a heap object - get map. If (!object->IsJSValue()) return the object.
Iain Merrick75681382010-08-19 15:07:18 +01004492 __ CompareObjectType(tos, scratch, scratch, JS_VALUE_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00004493 leave.Branch(ne);
4494 // Load the value.
Iain Merrick75681382010-08-19 15:07:18 +01004495 __ ldr(tos, FieldMemOperand(tos, JSValue::kValueOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00004496 leave.Bind();
Iain Merrick75681382010-08-19 15:07:18 +01004497 frame_->EmitPush(tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00004498}
4499
4500
4501void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* args) {
Iain Merrick75681382010-08-19 15:07:18 +01004502 Register scratch1 = VirtualFrame::scratch0();
4503 Register scratch2 = VirtualFrame::scratch1();
Steve Blocka7e24c12009-10-30 11:49:00 +00004504 JumpTarget leave;
Iain Merrick75681382010-08-19 15:07:18 +01004505
4506 ASSERT(args->length() == 2);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004507 Load(args->at(0)); // Load the object.
4508 Load(args->at(1)); // Load the value.
Iain Merrick75681382010-08-19 15:07:18 +01004509 Register value = frame_->PopToRegister();
4510 Register object = frame_->PopToRegister(value);
Steve Blocka7e24c12009-10-30 11:49:00 +00004511 // if (object->IsSmi()) return object.
Iain Merrick75681382010-08-19 15:07:18 +01004512 __ tst(object, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00004513 leave.Branch(eq);
4514 // It is a heap object - get map. If (!object->IsJSValue()) return the object.
Iain Merrick75681382010-08-19 15:07:18 +01004515 __ CompareObjectType(object, scratch1, scratch1, JS_VALUE_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00004516 leave.Branch(ne);
4517 // Store the value.
Iain Merrick75681382010-08-19 15:07:18 +01004518 __ str(value, FieldMemOperand(object, JSValue::kValueOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00004519 // Update the write barrier.
Iain Merrick75681382010-08-19 15:07:18 +01004520 __ RecordWrite(object,
4521 Operand(JSValue::kValueOffset - kHeapObjectTag),
4522 scratch1,
4523 scratch2);
Steve Blocka7e24c12009-10-30 11:49:00 +00004524 // Leave.
4525 leave.Bind();
Iain Merrick75681382010-08-19 15:07:18 +01004526 frame_->EmitPush(value);
Steve Blocka7e24c12009-10-30 11:49:00 +00004527}
4528
4529
4530void CodeGenerator::GenerateIsSmi(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004531 ASSERT(args->length() == 1);
Leon Clarkef7060e22010-06-03 12:02:55 +01004532 Load(args->at(0));
4533 Register reg = frame_->PopToRegister();
4534 __ tst(reg, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00004535 cc_reg_ = eq;
4536}
4537
4538
4539void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004540 // See comment in CodeGenerator::GenerateLog in codegen-ia32.cc.
4541 ASSERT_EQ(args->length(), 3);
4542#ifdef ENABLE_LOGGING_AND_PROFILING
4543 if (ShouldGenerateLog(args->at(0))) {
Leon Clarkef7060e22010-06-03 12:02:55 +01004544 Load(args->at(1));
4545 Load(args->at(2));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004546 frame_->CallRuntime(Runtime::kLog, 2);
Steve Blocka7e24c12009-10-30 11:49:00 +00004547 }
4548#endif
Leon Clarkef7060e22010-06-03 12:02:55 +01004549 frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +00004550}
4551
4552
4553void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004554 ASSERT(args->length() == 1);
Leon Clarkef7060e22010-06-03 12:02:55 +01004555 Load(args->at(0));
4556 Register reg = frame_->PopToRegister();
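// A non-negative smi has both the smi tag bit (bit 0) and the sign bit
// (bit 31) cleared, so a single tst of those two bits suffices.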
4557 __ tst(reg, Operand(kSmiTagMask | 0x80000000u));
Steve Blocka7e24c12009-10-30 11:49:00 +00004558 cc_reg_ = eq;
4559}
4560
4561
Steve Block8defd9f2010-07-08 12:39:36 +01004562// Generates the Math.pow method.
Steve Block6ded16b2010-05-10 14:33:55 +01004563void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
4564 ASSERT(args->length() == 2);
4565 Load(args->at(0));
4566 Load(args->at(1));
Steve Block8defd9f2010-07-08 12:39:36 +01004567
4568 if (!CpuFeatures::IsSupported(VFP3)) {
4569 frame_->CallRuntime(Runtime::kMath_pow, 2);
4570 frame_->EmitPush(r0);
4571 } else {
4572 CpuFeatures::Scope scope(VFP3);
4573 JumpTarget runtime, done;
4574 Label exponent_nonsmi, base_nonsmi, powi, not_minus_half, allocate_return;
4575
4576 Register scratch1 = VirtualFrame::scratch0();
4577 Register scratch2 = VirtualFrame::scratch1();
4578
4579 // Get base and exponent to registers.
4580 Register exponent = frame_->PopToRegister();
4581 Register base = frame_->PopToRegister(exponent);
4582 Register heap_number_map = no_reg;
4583
4584 // Set the frame for the runtime jump target. The code below jumps to the
4585 // jump target label so the frame needs to be established before that.
4586 ASSERT(runtime.entry_frame() == NULL);
4587 runtime.set_entry_frame(frame_);
4588
4589 __ BranchOnNotSmi(exponent, &exponent_nonsmi);
4590 __ BranchOnNotSmi(base, &base_nonsmi);
4591
4592 heap_number_map = r6;
4593 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
4594
4595 // Exponent is a smi and base is a smi. Get the smi value into vfp register
4596 // d1.
4597 __ SmiToDoubleVFPRegister(base, d1, scratch1, s0);
4598 __ b(&powi);
4599
4600 __ bind(&base_nonsmi);
4601 // Exponent is a smi and base is a non-smi. Get the double value from the
4602 // base into vfp register d1.
4603 __ ObjectToDoubleVFPRegister(base, d1,
4604 scratch1, scratch2, heap_number_map, s0,
4605 runtime.entry_label());
4606
4607 __ bind(&powi);
4608
4609 // Load 1.0 into d0.
Ben Murdoch3bec4d22010-07-22 14:51:16 +01004610 __ vmov(d0, 1.0);
Steve Block8defd9f2010-07-08 12:39:36 +01004611
4612 // Get the absolute untagged value of the exponent and use that for the
4613 // calculation.
4614 __ mov(scratch1, Operand(exponent, ASR, kSmiTagSize), SetCC);
Iain Merrick9ac36c92010-09-13 15:29:50 +01004615 // Negate if negative.
4616 __ rsb(scratch1, scratch1, Operand(0, RelocInfo::NONE), LeaveCC, mi);
Steve Block8defd9f2010-07-08 12:39:36 +01004617 __ vmov(d2, d0, mi); // 1.0 needed in d2 later if exponent is negative.
4618
4619 // Run through all the bits in the exponent. The result is accumulated in
4620 // d0, while d1 holds base^(2^i) for the bit i currently being processed.
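// For example, for exponent 6 (binary 110), processed from the least
// significant bit: bit 0 is clear, so d1 becomes base^2; bit 1 is set, so
// d0 becomes 1.0 * base^2 and d1 becomes base^4; bit 2 is set, so
// d0 becomes base^2 * base^4 = base^6.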
4621 Label more_bits;
4622 __ bind(&more_bits);
4623 __ mov(scratch1, Operand(scratch1, LSR, 1), SetCC);
4624 __ vmul(d0, d0, d1, cs); // Multiply by base^(2^i) if bit i is set.
4625 __ vmul(d1, d1, d1, ne); // Don't bother calculating next d1 if done.
4626 __ b(ne, &more_bits);
4627
4628 // If exponent is positive we are done.
Iain Merrick9ac36c92010-09-13 15:29:50 +01004629 __ cmp(exponent, Operand(0, RelocInfo::NONE));
Steve Block8defd9f2010-07-08 12:39:36 +01004630 __ b(ge, &allocate_return);
4631
4632 // If exponent is negative result is 1/result (d2 already holds 1.0 in that
4633 // case). However if d0 has reached infinity this will not provide the
4634 // correct result, so call runtime if that is the case.
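// +Infinity in IEEE 754 double format has all exponent bits set and a zero
// mantissa, i.e. high word 0x7FF00000 and low word 0x00000000.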
4635 __ mov(scratch2, Operand(0x7FF00000));
Iain Merrick9ac36c92010-09-13 15:29:50 +01004636 __ mov(scratch1, Operand(0, RelocInfo::NONE));
Steve Block8defd9f2010-07-08 12:39:36 +01004637 __ vmov(d1, scratch1, scratch2); // Load infinity into d1.
4638 __ vcmp(d0, d1);
4639 __ vmrs(pc);
4640 runtime.Branch(eq); // d0 reached infinity.
4641 __ vdiv(d0, d2, d0);
4642 __ b(&allocate_return);
4643
4644 __ bind(&exponent_nonsmi);
4645 // Special handling of raising to the power of -0.5 and 0.5. First check
4646 // that the exponent is a heap number and that its lower mantissa bits are
4647 // zero, which is the case for both of these values.
4648 heap_number_map = r6;
4649 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
4650 __ ldr(scratch1, FieldMemOperand(exponent, HeapObject::kMapOffset));
4651 __ ldr(scratch2, FieldMemOperand(exponent, HeapNumber::kMantissaOffset));
4652 __ cmp(scratch1, heap_number_map);
4653 runtime.Branch(ne);
4654 __ tst(scratch2, scratch2);
4655 runtime.Branch(ne);
4656
4657 // Load the higher bits (which contain the floating point exponent).
4658 __ ldr(scratch1, FieldMemOperand(exponent, HeapNumber::kExponentOffset));
4659
4660 // Compare exponent with -0.5.
4661 __ cmp(scratch1, Operand(0xbfe00000));
4662 __ b(ne, &not_minus_half);
4663
4664 // Get the double value from the base into vfp register d0.
4665 __ ObjectToDoubleVFPRegister(base, d0,
4666 scratch1, scratch2, heap_number_map, s0,
4667 runtime.entry_label(),
4668 AVOID_NANS_AND_INFINITIES);
4669
4670 // Load 1.0 into d2.
Ben Murdoch3bec4d22010-07-22 14:51:16 +01004671 __ vmov(d2, 1.0);
Steve Block8defd9f2010-07-08 12:39:36 +01004672
4673 // Calculate the reciprocal of the square root. 1/sqrt(x) = sqrt(1/x).
4674 __ vdiv(d0, d2, d0);
4675 __ vsqrt(d0, d0);
4676
4677 __ b(&allocate_return);
4678
4679 __ bind(&not_minus_half);
4680 // Compare exponent with 0.5.
4681 __ cmp(scratch1, Operand(0x3fe00000));
4682 runtime.Branch(ne);
4683
4684 // Get the double value from the base into vfp register d0.
4685 __ ObjectToDoubleVFPRegister(base, d0,
4686 scratch1, scratch2, heap_number_map, s0,
4687 runtime.entry_label(),
4688 AVOID_NANS_AND_INFINITIES);
4689 __ vsqrt(d0, d0);
4690
4691 __ bind(&allocate_return);
4692 Register scratch3 = r5;
4693 __ AllocateHeapNumberWithValue(scratch3, d0, scratch1, scratch2,
4694 heap_number_map, runtime.entry_label());
4695 __ mov(base, scratch3);
4696 done.Jump();
4697
4698 runtime.Bind();
4699
4700 // Push back the arguments again for the runtime call.
4701 frame_->EmitPush(base);
4702 frame_->EmitPush(exponent);
4703 frame_->CallRuntime(Runtime::kMath_pow, 2);
4704 __ Move(base, r0);
4705
4706 done.Bind();
4707 frame_->EmitPush(base);
4708 }
Steve Block6ded16b2010-05-10 14:33:55 +01004709}
4710
4711
Steve Block8defd9f2010-07-08 12:39:36 +01004712// Generates the Math.sqrt method.
Steve Block6ded16b2010-05-10 14:33:55 +01004713void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) {
4714 ASSERT(args->length() == 1);
4715 Load(args->at(0));
Steve Block8defd9f2010-07-08 12:39:36 +01004716
4717 if (!CpuFeatures::IsSupported(VFP3)) {
4718 frame_->CallRuntime(Runtime::kMath_sqrt, 1);
4719 frame_->EmitPush(r0);
4720 } else {
4721 CpuFeatures::Scope scope(VFP3);
4722 JumpTarget runtime, done;
4723
4724 Register scratch1 = VirtualFrame::scratch0();
4725 Register scratch2 = VirtualFrame::scratch1();
4726
4727 // Get the value from the frame.
4728 Register tos = frame_->PopToRegister();
4729
4730 // Set the frame for the runtime jump target. The code below jumps to the
4731 // jump target label so the frame needs to be established before that.
4732 ASSERT(runtime.entry_frame() == NULL);
4733 runtime.set_entry_frame(frame_);
4734
4735 Register heap_number_map = r6;
4736 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
4737
4738 // Get the double value from the heap number into vfp register d0.
4739 __ ObjectToDoubleVFPRegister(tos, d0,
4740 scratch1, scratch2, heap_number_map, s0,
4741 runtime.entry_label());
4742
4743 // Calculate the square root of d0 and place result in a heap number object.
4744 __ vsqrt(d0, d0);
4745 __ AllocateHeapNumberWithValue(
4746 tos, d0, scratch1, scratch2, heap_number_map, runtime.entry_label());
4747 done.Jump();
4748
4749 runtime.Bind();
4750 // Push back the argument again for the runtime call.
4751 frame_->EmitPush(tos);
4752 frame_->CallRuntime(Runtime::kMath_sqrt, 1);
4753 __ Move(tos, r0);
4754
4755 done.Bind();
4756 frame_->EmitPush(tos);
4757 }
Steve Block6ded16b2010-05-10 14:33:55 +01004758}
4759
4760
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004761class DeferredStringCharCodeAt : public DeferredCode {
4762 public:
4763 DeferredStringCharCodeAt(Register object,
4764 Register index,
4765 Register scratch,
4766 Register result)
4767 : result_(result),
4768 char_code_at_generator_(object,
4769 index,
4770 scratch,
4771 result,
4772 &need_conversion_,
4773 &need_conversion_,
4774 &index_out_of_range_,
4775 STRING_INDEX_IS_NUMBER) {}
4776
4777 StringCharCodeAtGenerator* fast_case_generator() {
4778 return &char_code_at_generator_;
4779 }
4780
4781 virtual void Generate() {
4782 VirtualFrameRuntimeCallHelper call_helper(frame_state());
4783 char_code_at_generator_.GenerateSlow(masm(), call_helper);
4784
4785 __ bind(&need_conversion_);
4786 // Move the undefined value into the result register, which will
4787 // trigger conversion.
4788 __ LoadRoot(result_, Heap::kUndefinedValueRootIndex);
4789 __ jmp(exit_label());
4790
4791 __ bind(&index_out_of_range_);
4792 // When the index is out of range, the spec requires us to return
4793 // NaN.
4794 __ LoadRoot(result_, Heap::kNanValueRootIndex);
4795 __ jmp(exit_label());
4796 }
4797
4798 private:
4799 Register result_;
4800
4801 Label need_conversion_;
4802 Label index_out_of_range_;
4803
4804 StringCharCodeAtGenerator char_code_at_generator_;
4805};
4806
4807
4808// This generates code that performs a String.prototype.charCodeAt() call
4809// or returns a smi in order to trigger conversion.
4810void CodeGenerator::GenerateStringCharCodeAt(ZoneList<Expression*>* args) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004811 Comment(masm_, "[ GenerateStringCharCodeAt");
Steve Blocka7e24c12009-10-30 11:49:00 +00004812 ASSERT(args->length() == 2);
Steve Blockd0582a62009-12-15 09:54:21 +00004813
Leon Clarkef7060e22010-06-03 12:02:55 +01004814 Load(args->at(0));
4815 Load(args->at(1));
Steve Blockd0582a62009-12-15 09:54:21 +00004816
Iain Merrick75681382010-08-19 15:07:18 +01004817 Register index = frame_->PopToRegister();
4818 Register object = frame_->PopToRegister(index);
Steve Blockd0582a62009-12-15 09:54:21 +00004819
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004820 // We need two extra registers.
Iain Merrick75681382010-08-19 15:07:18 +01004821 Register scratch = VirtualFrame::scratch0();
4822 Register result = VirtualFrame::scratch1();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004823
4824 DeferredStringCharCodeAt* deferred =
4825 new DeferredStringCharCodeAt(object,
4826 index,
4827 scratch,
4828 result);
4829 deferred->fast_case_generator()->GenerateFast(masm_);
4830 deferred->BindExit();
Leon Clarkef7060e22010-06-03 12:02:55 +01004831 frame_->EmitPush(result);
Steve Blocka7e24c12009-10-30 11:49:00 +00004832}
4833
4834
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004835class DeferredStringCharFromCode : public DeferredCode {
4836 public:
4837 DeferredStringCharFromCode(Register code,
4838 Register result)
4839 : char_from_code_generator_(code, result) {}
4840
4841 StringCharFromCodeGenerator* fast_case_generator() {
4842 return &char_from_code_generator_;
4843 }
4844
4845 virtual void Generate() {
4846 VirtualFrameRuntimeCallHelper call_helper(frame_state());
4847 char_from_code_generator_.GenerateSlow(masm(), call_helper);
4848 }
4849
4850 private:
4851 StringCharFromCodeGenerator char_from_code_generator_;
4852};
4853
4854
4855// Generates code for creating a one-char string from a char code.
4856void CodeGenerator::GenerateStringCharFromCode(ZoneList<Expression*>* args) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004857 Comment(masm_, "[ GenerateStringCharFromCode");
Steve Block6ded16b2010-05-10 14:33:55 +01004858 ASSERT(args->length() == 1);
4859
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004860 Load(args->at(0));
4861
Iain Merrick75681382010-08-19 15:07:18 +01004862 Register result = frame_->GetTOSRegister();
4863 Register code = frame_->PopToRegister(result);
Steve Block6ded16b2010-05-10 14:33:55 +01004864
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004865 DeferredStringCharFromCode* deferred = new DeferredStringCharFromCode(
4866 code, result);
4867 deferred->fast_case_generator()->GenerateFast(masm_);
4868 deferred->BindExit();
4869 frame_->EmitPush(result);
4870}
4871
4872
4873class DeferredStringCharAt : public DeferredCode {
4874 public:
4875 DeferredStringCharAt(Register object,
4876 Register index,
4877 Register scratch1,
4878 Register scratch2,
4879 Register result)
4880 : result_(result),
4881 char_at_generator_(object,
4882 index,
4883 scratch1,
4884 scratch2,
4885 result,
4886 &need_conversion_,
4887 &need_conversion_,
4888 &index_out_of_range_,
4889 STRING_INDEX_IS_NUMBER) {}
4890
4891 StringCharAtGenerator* fast_case_generator() {
4892 return &char_at_generator_;
4893 }
4894
4895 virtual void Generate() {
4896 VirtualFrameRuntimeCallHelper call_helper(frame_state());
4897 char_at_generator_.GenerateSlow(masm(), call_helper);
4898
4899 __ bind(&need_conversion_);
4900 // Move smi zero into the result register, which will trigger
4901 // conversion.
4902 __ mov(result_, Operand(Smi::FromInt(0)));
4903 __ jmp(exit_label());
4904
4905 __ bind(&index_out_of_range_);
4906 // When the index is out of range, the spec requires us to return
4907 // the empty string.
4908 __ LoadRoot(result_, Heap::kEmptyStringRootIndex);
4909 __ jmp(exit_label());
4910 }
4911
4912 private:
4913 Register result_;
4914
4915 Label need_conversion_;
4916 Label index_out_of_range_;
4917
4918 StringCharAtGenerator char_at_generator_;
4919};
4920
4921
4922// This generates code that performs a String.prototype.charAt() call
4923// or returns a smi in order to trigger conversion.
4924void CodeGenerator::GenerateStringCharAt(ZoneList<Expression*>* args) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004925 Comment(masm_, "[ GenerateStringCharAt");
4926 ASSERT(args->length() == 2);
4927
4928 Load(args->at(0));
4929 Load(args->at(1));
4930
Iain Merrick75681382010-08-19 15:07:18 +01004931 Register index = frame_->PopToRegister();
4932 Register object = frame_->PopToRegister(index);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004933
4934 // We need three extra registers.
Iain Merrick75681382010-08-19 15:07:18 +01004935 Register scratch1 = VirtualFrame::scratch0();
4936 Register scratch2 = VirtualFrame::scratch1();
4937 // Use r6 without notifying the virtual frame.
4938 Register result = r6;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004939
4940 DeferredStringCharAt* deferred =
4941 new DeferredStringCharAt(object,
4942 index,
4943 scratch1,
4944 scratch2,
4945 result);
4946 deferred->fast_case_generator()->GenerateFast(masm_);
4947 deferred->BindExit();
Steve Block6ded16b2010-05-10 14:33:55 +01004948 frame_->EmitPush(result);
4949}
4950
4951
Steve Blocka7e24c12009-10-30 11:49:00 +00004952void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004953 ASSERT(args->length() == 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004954 Load(args->at(0));
Steve Blocka7e24c12009-10-30 11:49:00 +00004955 JumpTarget answer;
4956 // We need the CC bits to come out as not_equal in the case where the
4957 // object is a smi. This can't be done with the usual test opcode so
4958 // we use XOR to get the right CC bits.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004959 Register possible_array = frame_->PopToRegister();
4960 Register scratch = VirtualFrame::scratch0();
4961 __ and_(scratch, possible_array, Operand(kSmiTagMask));
4962 __ eor(scratch, scratch, Operand(kSmiTagMask), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00004963 answer.Branch(ne);
4964 // It is a heap object - get the map. Check if the object is a JS array.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004965 __ CompareObjectType(possible_array, scratch, scratch, JS_ARRAY_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00004966 answer.Bind();
4967 cc_reg_ = eq;
4968}
4969
4970
Andrei Popescu402d9372010-02-26 13:31:12 +00004971void CodeGenerator::GenerateIsRegExp(ZoneList<Expression*>* args) {
Andrei Popescu402d9372010-02-26 13:31:12 +00004972 ASSERT(args->length() == 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004973 Load(args->at(0));
Andrei Popescu402d9372010-02-26 13:31:12 +00004974 JumpTarget answer;
4975 // We need the CC bits to come out as not_equal in the case where the
4976 // object is a smi. This can't be done with the usual test opcode so
4977 // we use XOR to get the right CC bits.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004978 Register possible_regexp = frame_->PopToRegister();
4979 Register scratch = VirtualFrame::scratch0();
4980 __ and_(scratch, possible_regexp, Operand(kSmiTagMask));
4981 __ eor(scratch, scratch, Operand(kSmiTagMask), SetCC);
Andrei Popescu402d9372010-02-26 13:31:12 +00004982 answer.Branch(ne);
4983 // It is a heap object - get the map. Check if the object is a regexp.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004984 __ CompareObjectType(possible_regexp, scratch, scratch, JS_REGEXP_TYPE);
Andrei Popescu402d9372010-02-26 13:31:12 +00004985 answer.Bind();
4986 cc_reg_ = eq;
4987}
4988
4989
Steve Blockd0582a62009-12-15 09:54:21 +00004990void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
4991 // This generates a fast version of:
4992 // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp')
Steve Blockd0582a62009-12-15 09:54:21 +00004993 ASSERT(args->length() == 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004994 Load(args->at(0));
4995 Register possible_object = frame_->PopToRegister();
4996 __ tst(possible_object, Operand(kSmiTagMask));
Steve Blockd0582a62009-12-15 09:54:21 +00004997 false_target()->Branch(eq);
4998
4999 __ LoadRoot(ip, Heap::kNullValueRootIndex);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005000 __ cmp(possible_object, ip);
Steve Blockd0582a62009-12-15 09:54:21 +00005001 true_target()->Branch(eq);
5002
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005003 Register map_reg = VirtualFrame::scratch0();
5004 __ ldr(map_reg, FieldMemOperand(possible_object, HeapObject::kMapOffset));
Steve Blockd0582a62009-12-15 09:54:21 +00005005 // Undetectable objects behave like undefined when tested with typeof.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005006 __ ldrb(possible_object, FieldMemOperand(map_reg, Map::kBitFieldOffset));
5007 __ tst(possible_object, Operand(1 << Map::kIsUndetectable));
Leon Clarkef7060e22010-06-03 12:02:55 +01005008 false_target()->Branch(ne);
Steve Blockd0582a62009-12-15 09:54:21 +00005009
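// Finally require the instance type to lie in the JS object range.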
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005010 __ ldrb(possible_object, FieldMemOperand(map_reg, Map::kInstanceTypeOffset));
5011 __ cmp(possible_object, Operand(FIRST_JS_OBJECT_TYPE));
Steve Blockd0582a62009-12-15 09:54:21 +00005012 false_target()->Branch(lt);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005013 __ cmp(possible_object, Operand(LAST_JS_OBJECT_TYPE));
Steve Blockd0582a62009-12-15 09:54:21 +00005014 cc_reg_ = le;
5015}
5016
5017
Ben Murdoch3bec4d22010-07-22 14:51:16 +01005018void CodeGenerator::GenerateIsSpecObject(ZoneList<Expression*>* args) {
5019 // This generates a fast version of:
5020 // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp' ||
5021 // typeof(arg) == 'function').
5022 // It includes undetectable objects (as opposed to IsObject).
5023 ASSERT(args->length() == 1);
5024 Load(args->at(0));
5025 Register value = frame_->PopToRegister();
5026 __ tst(value, Operand(kSmiTagMask));
5027 false_target()->Branch(eq);
5028 // Check that this is an object.
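// Any instance type at or above FIRST_JS_OBJECT_TYPE qualifies; this
// deliberately includes JS_FUNCTION_TYPE, which sorts above
// LAST_JS_OBJECT_TYPE.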
5029 __ ldr(value, FieldMemOperand(value, HeapObject::kMapOffset));
5030 __ ldrb(value, FieldMemOperand(value, Map::kInstanceTypeOffset));
5031 __ cmp(value, Operand(FIRST_JS_OBJECT_TYPE));
5032 cc_reg_ = ge;
5033}
5034
5035
Iain Merrick75681382010-08-19 15:07:18 +01005036// Deferred code to check whether a string wrapper object is safe to use the
5037// default valueOf behavior. This code is called after the bit caching this
5038// information in the map has been checked against the map of the object in
5039// the map_result_ register. On return the register map_result_ contains 1 for
5040// true and 0 for false.
5041class DeferredIsStringWrapperSafeForDefaultValueOf : public DeferredCode {
5042 public:
5043 DeferredIsStringWrapperSafeForDefaultValueOf(Register object,
5044 Register map_result,
5045 Register scratch1,
5046 Register scratch2)
5047 : object_(object),
5048 map_result_(map_result),
5049 scratch1_(scratch1),
5050 scratch2_(scratch2) { }
5051
5052 virtual void Generate() {
5053 Label false_result;
5054
5055 // Check that map is loaded as expected.
5056 if (FLAG_debug_code) {
5057 __ ldr(ip, FieldMemOperand(object_, HeapObject::kMapOffset));
5058 __ cmp(map_result_, ip);
5059 __ Assert(eq, "Map not in expected register");
5060 }
5061
5062 // Check for fast case object. Generate false result for slow case object.
5063 __ ldr(scratch1_, FieldMemOperand(object_, JSObject::kPropertiesOffset));
5064 __ ldr(scratch1_, FieldMemOperand(scratch1_, HeapObject::kMapOffset));
5065 __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
5066 __ cmp(scratch1_, ip);
5067 __ b(eq, &false_result);
5068
5069 // Look for valueOf symbol in the descriptor array, and indicate false if
5070 // found. The type is not checked, so if it is a transition it is a false
5071 // negative.
5072 __ ldr(map_result_,
5073 FieldMemOperand(map_result_, Map::kInstanceDescriptorsOffset));
5074 __ ldr(scratch2_, FieldMemOperand(map_result_, FixedArray::kLengthOffset));
5075 // map_result_: descriptor array
5076 // scratch2_: length of descriptor array
5077 // Calculate the end of the descriptor array.
5078 STATIC_ASSERT(kSmiTag == 0);
5079 STATIC_ASSERT(kSmiTagSize == 1);
5080 STATIC_ASSERT(kPointerSize == 4);
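// The length is a smi (the value shifted left by kSmiTagSize), so shifting
// it left by kPointerSizeLog2 - kSmiTagSize converts it to a byte offset.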
5081 __ add(scratch1_,
5082 map_result_,
5083 Operand(FixedArray::kHeaderSize - kHeapObjectTag));
5084 __ add(scratch1_,
5085 scratch1_,
5086 Operand(scratch2_, LSL, kPointerSizeLog2 - kSmiTagSize));
5087
5088 // Calculate location of the first key name.
5089 __ add(map_result_,
5090 map_result_,
5091 Operand(FixedArray::kHeaderSize - kHeapObjectTag +
5092 DescriptorArray::kFirstIndex * kPointerSize));
5093 // Loop through all the keys in the descriptor array. If one of these is the
5094 // symbol valueOf the result is false.
5095 Label entry, loop;
5096 // The use of ip to store the valueOf symbol assumes that it is not otherwise
5097 // used in the loop below.
5098 __ mov(ip, Operand(Factory::value_of_symbol()));
5099 __ jmp(&entry);
5100 __ bind(&loop);
5101 __ ldr(scratch2_, MemOperand(map_result_, 0));
5102 __ cmp(scratch2_, ip);
5103 __ b(eq, &false_result);
5104 __ add(map_result_, map_result_, Operand(kPointerSize));
5105 __ bind(&entry);
5106 __ cmp(map_result_, Operand(scratch1_));
5107 __ b(ne, &loop);
5108
5109 // Reload map as register map_result_ was used as temporary above.
5110 __ ldr(map_result_, FieldMemOperand(object_, HeapObject::kMapOffset));
5111
5112 // If a valueOf property is not found on the object, check that its
5113 // prototype is the unmodified String prototype. If not, the result is false.
5114 __ ldr(scratch1_, FieldMemOperand(map_result_, Map::kPrototypeOffset));
5115 __ tst(scratch1_, Operand(kSmiTagMask));
5116 __ b(eq, &false_result);
5117 __ ldr(scratch1_, FieldMemOperand(scratch1_, HeapObject::kMapOffset));
5118 __ ldr(scratch2_,
5119 CodeGenerator::ContextOperand(cp, Context::GLOBAL_INDEX));
5120 __ ldr(scratch2_,
5121 FieldMemOperand(scratch2_, GlobalObject::kGlobalContextOffset));
5122 __ ldr(scratch2_,
5123 CodeGenerator::ContextOperand(
5124 scratch2_, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
5125 __ cmp(scratch1_, scratch2_);
5126 __ b(ne, &false_result);
5127
5128 // Set the bit in the map to indicate that it has been checked safe for
5129 // default valueOf, and set the result to true.
5130 __ ldr(scratch1_, FieldMemOperand(map_result_, Map::kBitField2Offset));
5131 __ orr(scratch1_,
5132 scratch1_,
5133 Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
5134 __ str(scratch1_, FieldMemOperand(map_result_, Map::kBitField2Offset));
5135 __ mov(map_result_, Operand(1));
5136 __ jmp(exit_label());
5137 __ bind(&false_result);
5138 // Set false result.
Iain Merrick9ac36c92010-09-13 15:29:50 +01005139 __ mov(map_result_, Operand(0, RelocInfo::NONE));
Iain Merrick75681382010-08-19 15:07:18 +01005140 }
5141
5142 private:
5143 Register object_;
5144 Register map_result_;
5145 Register scratch1_;
5146 Register scratch2_;
5147};
5148
5149
5150void CodeGenerator::GenerateIsStringWrapperSafeForDefaultValueOf(
5151 ZoneList<Expression*>* args) {
5152 ASSERT(args->length() == 1);
5153 Load(args->at(0));
5154 Register obj = frame_->PopToRegister(); // Pop the string wrapper.
5155 if (FLAG_debug_code) {
5156 __ AbortIfSmi(obj);
5157 }
5158
5159 // Check whether this map has already been checked to be safe for default
5160 // valueOf.
5161 Register map_result = VirtualFrame::scratch0();
5162 __ ldr(map_result, FieldMemOperand(obj, HeapObject::kMapOffset));
5163 __ ldrb(ip, FieldMemOperand(map_result, Map::kBitField2Offset));
5164 __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
5165 true_target()->Branch(ne);
5166
5167 // We need an additional two scratch registers for the deferred code.
5168 Register scratch1 = VirtualFrame::scratch1();
5169 // Use r6 without notifying the virtual frame.
5170 Register scratch2 = r6;
5171
5172 DeferredIsStringWrapperSafeForDefaultValueOf* deferred =
5173 new DeferredIsStringWrapperSafeForDefaultValueOf(
5174 obj, map_result, scratch1, scratch2);
5175 deferred->Branch(eq);
5176 deferred->BindExit();
5177 __ tst(map_result, Operand(map_result));
5178 cc_reg_ = ne;
5179}
5180
5181
Steve Blockd0582a62009-12-15 09:54:21 +00005182void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) {
5183 // This generates a fast version of:
5184 // (%_ClassOf(arg) === 'Function')
Steve Blockd0582a62009-12-15 09:54:21 +00005185 ASSERT(args->length() == 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005186 Load(args->at(0));
5187 Register possible_function = frame_->PopToRegister();
5188 __ tst(possible_function, Operand(kSmiTagMask));
Steve Blockd0582a62009-12-15 09:54:21 +00005189 false_target()->Branch(eq);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005190 Register map_reg = VirtualFrame::scratch0();
5191 Register scratch = VirtualFrame::scratch1();
5192 __ CompareObjectType(possible_function, map_reg, scratch, JS_FUNCTION_TYPE);
Steve Blockd0582a62009-12-15 09:54:21 +00005193 cc_reg_ = eq;
5194}
5195
5196
Leon Clarked91b9f72010-01-27 17:25:45 +00005197void CodeGenerator::GenerateIsUndetectableObject(ZoneList<Expression*>* args) {
Leon Clarked91b9f72010-01-27 17:25:45 +00005198 ASSERT(args->length() == 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005199 Load(args->at(0));
5200 Register possible_undetectable = frame_->PopToRegister();
5201 __ tst(possible_undetectable, Operand(kSmiTagMask));
Leon Clarked91b9f72010-01-27 17:25:45 +00005202 false_target()->Branch(eq);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005203 Register scratch = VirtualFrame::scratch0();
5204 __ ldr(scratch,
5205 FieldMemOperand(possible_undetectable, HeapObject::kMapOffset));
5206 __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
5207 __ tst(scratch, Operand(1 << Map::kIsUndetectable));
Leon Clarked91b9f72010-01-27 17:25:45 +00005208 cc_reg_ = ne;
5209}
5210
5211
Steve Blocka7e24c12009-10-30 11:49:00 +00005212void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005213 ASSERT(args->length() == 0);
5214
Leon Clarkef7060e22010-06-03 12:02:55 +01005215 Register scratch0 = VirtualFrame::scratch0();
5216 Register scratch1 = VirtualFrame::scratch1();
Steve Blocka7e24c12009-10-30 11:49:00 +00005217 // Get the frame pointer for the calling frame.
Leon Clarkef7060e22010-06-03 12:02:55 +01005218 __ ldr(scratch0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00005219
5220 // Skip the arguments adaptor frame if it exists.
Leon Clarkef7060e22010-06-03 12:02:55 +01005221 __ ldr(scratch1,
5222 MemOperand(scratch0, StandardFrameConstants::kContextOffset));
5223 __ cmp(scratch1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
5224 __ ldr(scratch0,
5225 MemOperand(scratch0, StandardFrameConstants::kCallerFPOffset), eq);
Steve Blocka7e24c12009-10-30 11:49:00 +00005226
5227 // Check the marker in the calling frame.
Leon Clarkef7060e22010-06-03 12:02:55 +01005228 __ ldr(scratch1,
5229 MemOperand(scratch0, StandardFrameConstants::kMarkerOffset));
5230 __ cmp(scratch1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
Steve Blocka7e24c12009-10-30 11:49:00 +00005231 cc_reg_ = eq;
5232}
5233
5234
5235void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005236 ASSERT(args->length() == 0);
5237
Leon Clarkef7060e22010-06-03 12:02:55 +01005238 Register tos = frame_->GetTOSRegister();
5239 Register scratch0 = VirtualFrame::scratch0();
5240 Register scratch1 = VirtualFrame::scratch1();
Steve Blocka7e24c12009-10-30 11:49:00 +00005241
Steve Block6ded16b2010-05-10 14:33:55 +01005242 // Check if the calling frame is an arguments adaptor frame.
Leon Clarkef7060e22010-06-03 12:02:55 +01005243 __ ldr(scratch0,
5244 MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
5245 __ ldr(scratch1,
5246 MemOperand(scratch0, StandardFrameConstants::kContextOffset));
5247 __ cmp(scratch1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
5248
5249 // Get the number of formal parameters.
5250 __ mov(tos, Operand(Smi::FromInt(scope()->num_parameters())), LeaveCC, ne);
Steve Block6ded16b2010-05-10 14:33:55 +01005251
5252 // Arguments adaptor case: Read the arguments length from the
5253 // adaptor frame.
Leon Clarkef7060e22010-06-03 12:02:55 +01005254 __ ldr(tos,
5255 MemOperand(scratch0, ArgumentsAdaptorFrameConstants::kLengthOffset),
5256 eq);
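 // Editorial note: the "ne"/"eq" suffixes above predicate both instructions
 // on the single adaptor-frame comparison, so exactly one of the mov and the
 // ldr takes effect and no branch is needed -- ARM conditional execution used
 // as a branch-free select.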
Steve Block6ded16b2010-05-10 14:33:55 +01005257
Leon Clarkef7060e22010-06-03 12:02:55 +01005258 frame_->EmitPush(tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00005259}
5260
5261
Steve Block6ded16b2010-05-10 14:33:55 +01005262void CodeGenerator::GenerateArguments(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005263 ASSERT(args->length() == 1);
5264
5265 // Satisfy contract with ArgumentsAccessStub:
5266 // Load the key into r1 and the formal parameters count into r0.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005267 Load(args->at(0));
Iain Merrick75681382010-08-19 15:07:18 +01005268 frame_->PopToR1();
5269 frame_->SpillAll();
Andrei Popescu31002712010-02-23 13:46:05 +00005270 __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));
Steve Blocka7e24c12009-10-30 11:49:00 +00005271
5272 // Call the shared stub to get to arguments[key].
5273 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
5274 frame_->CallStub(&stub, 0);
5275 frame_->EmitPush(r0);
5276}
5277
5278
Steve Block6ded16b2010-05-10 14:33:55 +01005279void CodeGenerator::GenerateRandomHeapNumber(
5280 ZoneList<Expression*>* args) {
5281 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00005282 ASSERT(args->length() == 0);
Steve Block6ded16b2010-05-10 14:33:55 +01005283
5284 Label slow_allocate_heapnumber;
5285 Label heapnumber_allocated;
5286
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01005287 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
5288 __ AllocateHeapNumber(r4, r1, r2, r6, &slow_allocate_heapnumber);
Steve Block6ded16b2010-05-10 14:33:55 +01005289 __ jmp(&heapnumber_allocated);
5290
5291 __ bind(&slow_allocate_heapnumber);
Ben Murdoch3bec4d22010-07-22 14:51:16 +01005292 // Allocate a heap number.
5293 __ CallRuntime(Runtime::kNumberAlloc, 0);
Steve Block6ded16b2010-05-10 14:33:55 +01005294 __ mov(r4, Operand(r0));
5295
5296 __ bind(&heapnumber_allocated);
5297
5298 // Convert 32 random bits in r0 to 0.(32 random bits) in a double
5299 // by computing:
5300 // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20).
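  // Editorial worked example (not part of the original source): the double
  // with bit pattern 0x41300000_xxxxxxxx has exponent 0x413 - 0x3ff = 20 and
  // a 52-bit mantissa whose low 32 bits are x, so its value is
  //   (1 + x * 2^-52) * 2^20 = 2^20 + x * 2^-32.
  // Subtracting 1.0 x 2^20 (bit pattern 0x4130000000000000) leaves
  // x * 2^-32, i.e. 0.xxxxxxxx in binary -- a uniform value in [0, 1).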
5301 if (CpuFeatures::IsSupported(VFP3)) {
5302 __ PrepareCallCFunction(0, r1);
5303 __ CallCFunction(ExternalReference::random_uint32_function(), 0);
5304
5305 CpuFeatures::Scope scope(VFP3);
5306 // 0x41300000 is the top half of 1.0 x 2^20 as a double.
5307 // Create this constant using mov/orr to avoid PC relative load.
5308 __ mov(r1, Operand(0x41000000));
5309 __ orr(r1, r1, Operand(0x300000));
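    // Editorial note: 0x41300000 itself is not encodable as an ARM immediate
    // (an 8-bit value rotated by an even amount), but 0x41000000 and
    // 0x300000 both are, so mov/orr builds the constant in two instructions
    // without a PC-relative literal load.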
5310 // Move 0x41300000xxxxxxxx (x = random bits) to VFP.
5311 __ vmov(d7, r0, r1);
5312 // Move 0x4130000000000000 to VFP.
Iain Merrick9ac36c92010-09-13 15:29:50 +01005313 __ mov(r0, Operand(0, RelocInfo::NONE));
Steve Block6ded16b2010-05-10 14:33:55 +01005314 __ vmov(d8, r0, r1);
5315 // Subtract and store the result in the heap number.
5316 __ vsub(d7, d7, d8);
5317 __ sub(r0, r4, Operand(kHeapObjectTag));
5318 __ vstr(d7, r0, HeapNumber::kValueOffset);
5319 frame_->EmitPush(r4);
5320 } else {
5321 __ mov(r0, Operand(r4));
5322 __ PrepareCallCFunction(1, r1);
5323 __ CallCFunction(
5324 ExternalReference::fill_heap_number_with_random_function(), 1);
5325 frame_->EmitPush(r0);
5326 }
Steve Blocka7e24c12009-10-30 11:49:00 +00005327}
5328
5329
Steve Blockd0582a62009-12-15 09:54:21 +00005330void CodeGenerator::GenerateStringAdd(ZoneList<Expression*>* args) {
5331 ASSERT_EQ(2, args->length());
5332
5333 Load(args->at(0));
5334 Load(args->at(1));
5335
Andrei Popescu31002712010-02-23 13:46:05 +00005336 StringAddStub stub(NO_STRING_ADD_FLAGS);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005337 frame_->SpillAll();
Andrei Popescu31002712010-02-23 13:46:05 +00005338 frame_->CallStub(&stub, 2);
Steve Blockd0582a62009-12-15 09:54:21 +00005339 frame_->EmitPush(r0);
5340}
5341
5342
Leon Clarkee46be812010-01-19 14:06:41 +00005343void CodeGenerator::GenerateSubString(ZoneList<Expression*>* args) {
5344 ASSERT_EQ(3, args->length());
5345
5346 Load(args->at(0));
5347 Load(args->at(1));
5348 Load(args->at(2));
5349
Andrei Popescu31002712010-02-23 13:46:05 +00005350 SubStringStub stub;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005351 frame_->SpillAll();
Andrei Popescu31002712010-02-23 13:46:05 +00005352 frame_->CallStub(&stub, 3);
Leon Clarkee46be812010-01-19 14:06:41 +00005353 frame_->EmitPush(r0);
5354}
5355
5356
5357void CodeGenerator::GenerateStringCompare(ZoneList<Expression*>* args) {
5358 ASSERT_EQ(2, args->length());
5359
5360 Load(args->at(0));
5361 Load(args->at(1));
5362
Leon Clarked91b9f72010-01-27 17:25:45 +00005363 StringCompareStub stub;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005364 frame_->SpillAll();
Leon Clarked91b9f72010-01-27 17:25:45 +00005365 frame_->CallStub(&stub, 2);
Leon Clarkee46be812010-01-19 14:06:41 +00005366 frame_->EmitPush(r0);
5367}
5368
5369
5370void CodeGenerator::GenerateRegExpExec(ZoneList<Expression*>* args) {
5371 ASSERT_EQ(4, args->length());
5372
5373 Load(args->at(0));
5374 Load(args->at(1));
5375 Load(args->at(2));
5376 Load(args->at(3));
Steve Block6ded16b2010-05-10 14:33:55 +01005377 RegExpExecStub stub;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005378 frame_->SpillAll();
Steve Block6ded16b2010-05-10 14:33:55 +01005379 frame_->CallStub(&stub, 4);
5380 frame_->EmitPush(r0);
5381}
Leon Clarkee46be812010-01-19 14:06:41 +00005382
Steve Block6ded16b2010-05-10 14:33:55 +01005383
5384void CodeGenerator::GenerateRegExpConstructResult(ZoneList<Expression*>* args) {
5385 // No stub. This code only occurs a few times in regexp.js.
5386 const int kMaxInlineLength = 100;
5387 ASSERT_EQ(3, args->length());
5388 Load(args->at(0)); // Size of array, smi.
5389 Load(args->at(1)); // "index" property value.
5390 Load(args->at(2)); // "input" property value.
5391 {
5392 VirtualFrame::SpilledScope spilled_scope(frame_);
5393 Label slowcase;
5394 Label done;
5395 __ ldr(r1, MemOperand(sp, kPointerSize * 2));
5396 STATIC_ASSERT(kSmiTag == 0);
5397 STATIC_ASSERT(kSmiTagSize == 1);
5398 __ tst(r1, Operand(kSmiTagMask));
5399 __ b(ne, &slowcase);
5400 __ cmp(r1, Operand(Smi::FromInt(kMaxInlineLength)));
5401 __ b(hi, &slowcase);
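    // Editorial note: the unsigned "hi" condition does double duty here -- a
    // negative length, viewed as an unsigned 32-bit smi, compares larger
    // than Smi::FromInt(kMaxInlineLength), so it also falls through to the
    // slow case.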
5402 // Smi-tagging is equivalent to multiplying by 2.
5403 // Allocate RegExpResult followed by FixedArray with size in r2.
5404 // JSArray: [Map][empty properties][Elements][Length-smi][index][input]
5405 // Elements: [Map][Length][..elements..]
5406 // Size of JSArray with two in-object properties and the header of a
5407 // FixedArray.
5408 int objects_size =
5409 (JSRegExpResult::kSize + FixedArray::kHeaderSize) / kPointerSize;
5410 __ mov(r5, Operand(r1, LSR, kSmiTagSize + kSmiShiftSize));
5411 __ add(r2, r5, Operand(objects_size));
Kristian Monsen25f61362010-05-21 11:50:48 +01005412 __ AllocateInNewSpace(
5413 r2, // In: Size, in words.
5414 r0, // Out: Start of allocation (tagged).
5415 r3, // Scratch register.
5416 r4, // Scratch register.
5417 &slowcase,
5418 static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));
Steve Block6ded16b2010-05-10 14:33:55 +01005419 // r0: Start of allocated area, object-tagged.
5420 // r1: Number of elements in array, as smi.
5421 // r5: Number of elements, untagged.
5422
5423 // Set JSArray map to global.regexp_result_map().
5424 // Set empty properties FixedArray.
5425 // Set elements to point to FixedArray allocated right after the JSArray.
5426 // Interleave operations for better latency.
5427 __ ldr(r2, ContextOperand(cp, Context::GLOBAL_INDEX));
5428 __ add(r3, r0, Operand(JSRegExpResult::kSize));
5429 __ mov(r4, Operand(Factory::empty_fixed_array()));
5430 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset));
5431 __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset));
5432 __ ldr(r2, ContextOperand(r2, Context::REGEXP_RESULT_MAP_INDEX));
5433 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
5434 __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
5435
5436 // Set input, index and length fields from arguments.
5437 __ ldm(ia_w, sp, static_cast<RegList>(r2.bit() | r4.bit()));
5438 __ str(r1, FieldMemOperand(r0, JSArray::kLengthOffset));
5439 __ add(sp, sp, Operand(kPointerSize));
5440 __ str(r4, FieldMemOperand(r0, JSRegExpResult::kIndexOffset));
5441 __ str(r2, FieldMemOperand(r0, JSRegExpResult::kInputOffset));
5442
5443 // Fill out the elements FixedArray.
5444 // r0: JSArray, tagged.
5445 // r3: FixedArray, tagged.
5446 // r5: Number of elements in array, untagged.
5447
5448 // Set map.
5449 __ mov(r2, Operand(Factory::fixed_array_map()));
5450 __ str(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
5451 // Set FixedArray length.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005452 __ mov(r6, Operand(r5, LSL, kSmiTagSize));
5453 __ str(r6, FieldMemOperand(r3, FixedArray::kLengthOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01005454 // Fill contents of the fixed array with the hole value.
5455 __ mov(r2, Operand(Factory::the_hole_value()));
5456 __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
5457 // Fill fixed array elements with hole.
5458 // r0: JSArray, tagged.
5459 // r2: the hole.
5460 // r3: Start of elements in FixedArray.
5461 // r5: Number of elements to fill.
5462 Label loop;
5463 __ tst(r5, Operand(r5));
5464 __ bind(&loop);
5465 __ b(le, &done); // Jump if r5 is negative or zero.
5466 __ sub(r5, r5, Operand(1), SetCC);
5467 __ str(r2, MemOperand(r3, r5, LSL, kPointerSizeLog2));
5468 __ jmp(&loop);
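    // Editorial note on the loop shape above: the "le" test at the top
    // consumes flags set before entry (by the tst) on the first iteration
    // and by the SetCC subtraction on every later one, so the loop stores
    // the hole r5 times with no separate compare instruction inside it.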
5469
5470 __ bind(&slowcase);
5471 __ CallRuntime(Runtime::kRegExpConstructResult, 3);
5472
5473 __ bind(&done);
5474 }
5475 frame_->Forget(3);
5476 frame_->EmitPush(r0);
5477}
5478
5479
Steve Block791712a2010-08-27 10:21:07 +01005480void CodeGenerator::GenerateRegExpCloneResult(ZoneList<Expression*>* args) {
5481 ASSERT_EQ(1, args->length());
5482
5483 Load(args->at(0));
5484 frame_->PopToR0();
5485 {
5486 VirtualFrame::SpilledScope spilled_scope(frame_);
5487
5488 Label done;
5489 Label call_runtime;
5490 __ BranchOnSmi(r0, &done);
5491
5492 // Load JSRegExp map into r1. Check that argument object has this map.
5493 // Arguments to this function should be results of calling RegExp exec,
5494 // each of which is either an unmodified JSRegExpResult or null. Anything
5495 // not having the unmodified JSRegExpResult map is returned unmodified.
5496 // This also ensures that elements are fast.
5497
5498 __ ldr(r1, ContextOperand(cp, Context::GLOBAL_INDEX));
5499 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalContextOffset));
5500 __ ldr(r1, ContextOperand(r1, Context::REGEXP_RESULT_MAP_INDEX));
5501 __ ldr(ip, FieldMemOperand(r0, HeapObject::kMapOffset));
5502 __ cmp(r1, Operand(ip));
5503 __ b(ne, &done);
5504
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005505 if (FLAG_debug_code) {
5506 __ LoadRoot(r2, Heap::kEmptyFixedArrayRootIndex);
5507 __ ldr(ip, FieldMemOperand(r0, JSObject::kPropertiesOffset));
5508 __ cmp(ip, r2);
5509 __ Check(eq, "JSRegExpResult: default map but non-empty properties.");
5510 }
5511
Steve Block791712a2010-08-27 10:21:07 +01005512 // All set, copy the contents to a new object.
5513 __ AllocateInNewSpace(JSRegExpResult::kSize,
5514 r2,
5515 r3,
5516 r4,
5517 &call_runtime,
5518 NO_ALLOCATION_FLAGS);
5519 // Store RegExpResult map as map of allocated object.
5520 ASSERT(JSRegExpResult::kSize == 6 * kPointerSize);
5521 // Copy all fields (map is already in r1) from (untagged) r0 to r2.
5522 // Change map of elements array (ends up in r4) to be a FixedCOWArray.
5523 __ bic(r0, r0, Operand(kHeapObjectTagMask));
5524 __ ldm(ib, r0, r3.bit() | r4.bit() | r5.bit() | r6.bit() | r7.bit());
5525 __ stm(ia, r2,
5526 r1.bit() | r3.bit() | r4.bit() | r5.bit() | r6.bit() | r7.bit());
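    // Editorial note: ldm "ib" (increment before) starts at r0 + 4, so the
    // five loads skip the map word at offset 0; stm "ia" (increment after)
    // starts at r2 + 0, so the stores write the map from r1 followed by the
    // five copied fields -- six words, matching JSRegExpResult::kSize.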
Steve Block791712a2010-08-27 10:21:07 +01005527 ASSERT(JSRegExp::kElementsOffset == 2 * kPointerSize);
5528 // Check whether the elements array is the empty fixed array; otherwise make
5529 // it copy-on-write (it should never be empty unless someone is messing
5530 // with the arguments to the runtime function).
5531 __ LoadRoot(ip, Heap::kEmptyFixedArrayRootIndex);
5532 __ add(r0, r2, Operand(kHeapObjectTag)); // Tag result and move it to r0.
5533 __ cmp(r4, ip);
5534 __ b(eq, &done);
5535 __ LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
5536 __ str(ip, FieldMemOperand(r4, HeapObject::kMapOffset));
5537 __ b(&done);
5538 __ bind(&call_runtime);
5539 __ push(r0);
5540 __ CallRuntime(Runtime::kRegExpCloneResult, 1);
5541 __ bind(&done);
5542 }
5543 frame_->EmitPush(r0);
5544}
5545
5546
Steve Block6ded16b2010-05-10 14:33:55 +01005547class DeferredSearchCache: public DeferredCode {
5548 public:
5549 DeferredSearchCache(Register dst, Register cache, Register key)
5550 : dst_(dst), cache_(cache), key_(key) {
5551 set_comment("[ DeferredSearchCache");
5552 }
5553
5554 virtual void Generate();
5555
5556 private:
5557 Register dst_, cache_, key_;
5558};
5559
5560
5561void DeferredSearchCache::Generate() {
5562 __ Push(cache_, key_);
5563 __ CallRuntime(Runtime::kGetFromCache, 2);
Iain Merrick75681382010-08-19 15:07:18 +01005564 __ Move(dst_, r0);
Steve Block6ded16b2010-05-10 14:33:55 +01005565}
5566
5567
5568void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {
5569 ASSERT_EQ(2, args->length());
5570
5571 ASSERT_NE(NULL, args->at(0)->AsLiteral());
5572 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
5573
5574 Handle<FixedArray> jsfunction_result_caches(
5575 Top::global_context()->jsfunction_result_caches());
5576 if (jsfunction_result_caches->length() <= cache_id) {
5577 __ Abort("Attempt to use undefined cache.");
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005578 frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01005579 return;
5580 }
5581
5582 Load(args->at(1));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005583
Iain Merrick75681382010-08-19 15:07:18 +01005584 frame_->PopToR1();
5585 frame_->SpillAll();
5586 Register key = r1; // Just popped into r1.
5587 Register result = r0; // Free, as frame has just been spilled.
5588 Register scratch1 = VirtualFrame::scratch0();
5589 Register scratch2 = VirtualFrame::scratch1();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005590
Iain Merrick75681382010-08-19 15:07:18 +01005591 __ ldr(scratch1, ContextOperand(cp, Context::GLOBAL_INDEX));
5592 __ ldr(scratch1,
5593 FieldMemOperand(scratch1, GlobalObject::kGlobalContextOffset));
5594 __ ldr(scratch1,
5595 ContextOperand(scratch1, Context::JSFUNCTION_RESULT_CACHES_INDEX));
5596 __ ldr(scratch1,
5597 FieldMemOperand(scratch1, FixedArray::OffsetOfElementAt(cache_id)));
Steve Block6ded16b2010-05-10 14:33:55 +01005598
Iain Merrick75681382010-08-19 15:07:18 +01005599 DeferredSearchCache* deferred =
5600 new DeferredSearchCache(result, scratch1, key);
Steve Block6ded16b2010-05-10 14:33:55 +01005601
5602 const int kFingerOffset =
5603 FixedArray::OffsetOfElementAt(JSFunctionResultCache::kFingerIndex);
Kristian Monsen50ef84f2010-07-29 15:18:00 +01005604 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
Iain Merrick75681382010-08-19 15:07:18 +01005605 __ ldr(result, FieldMemOperand(scratch1, kFingerOffset));
5606 // result now holds finger offset as a smi.
5607 __ add(scratch2, scratch1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
5608 // scratch2 now points to the start of fixed array elements.
5609 __ ldr(result,
5610 MemOperand(
5611 scratch2, result, LSL, kPointerSizeLog2 - kSmiTagSize, PreIndex));
5612 // Note side effect of PreIndex: scratch2 now points to the key of the pair.
5613 __ cmp(key, result);
Steve Block6ded16b2010-05-10 14:33:55 +01005614 deferred->Branch(ne);
5615
Iain Merrick75681382010-08-19 15:07:18 +01005616 __ ldr(result, MemOperand(scratch2, kPointerSize));
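  // Editorial sketch of the cache layout as this code assumes it: the cache
  // is a FixedArray of [..header fields incl. finger.., key0, value0, key1,
  // value1, ...], where the finger is a smi element offset pointing at the
  // most recently used key. The PreIndex load above both fetches that key
  // and leaves scratch2 addressing it, so on a hit the cached value is
  // simply the next word.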
Steve Block6ded16b2010-05-10 14:33:55 +01005617
5618 deferred->BindExit();
Iain Merrick75681382010-08-19 15:07:18 +01005619 frame_->EmitPush(result);
Leon Clarkee46be812010-01-19 14:06:41 +00005620}
5621
5622
Andrei Popescu402d9372010-02-26 13:31:12 +00005623void CodeGenerator::GenerateNumberToString(ZoneList<Expression*>* args) {
5624 ASSERT_EQ(args->length(), 1);
5625
5626 // Load the argument on the stack and jump to the runtime.
5627 Load(args->at(0));
5628
Steve Block6ded16b2010-05-10 14:33:55 +01005629 NumberToStringStub stub;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005630 frame_->SpillAll();
Steve Block6ded16b2010-05-10 14:33:55 +01005631 frame_->CallStub(&stub, 1);
5632 frame_->EmitPush(r0);
5633}
5634
5635
5636class DeferredSwapElements: public DeferredCode {
5637 public:
5638 DeferredSwapElements(Register object, Register index1, Register index2)
5639 : object_(object), index1_(index1), index2_(index2) {
5640 set_comment("[ DeferredSwapElements");
5641 }
5642
5643 virtual void Generate();
5644
5645 private:
5646 Register object_, index1_, index2_;
5647};
5648
5649
5650void DeferredSwapElements::Generate() {
5651 __ push(object_);
5652 __ push(index1_);
5653 __ push(index2_);
5654 __ CallRuntime(Runtime::kSwapElements, 3);
5655}
5656
5657
5658void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) {
5659 Comment cmnt(masm_, "[ GenerateSwapElements");
5660
5661 ASSERT_EQ(3, args->length());
5662
5663 Load(args->at(0));
5664 Load(args->at(1));
5665 Load(args->at(2));
5666
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005667 VirtualFrame::SpilledScope spilled_scope(frame_);
5668
Steve Block6ded16b2010-05-10 14:33:55 +01005669 Register index2 = r2;
5670 Register index1 = r1;
5671 Register object = r0;
5672 Register tmp1 = r3;
5673 Register tmp2 = r4;
5674
5675 frame_->EmitPop(index2);
5676 frame_->EmitPop(index1);
5677 frame_->EmitPop(object);
5678
5679 DeferredSwapElements* deferred =
5680 new DeferredSwapElements(object, index1, index2);
5681
5682 // Fetch the map and check if array is in fast case.
5683 // Check that object doesn't require security checks and
5684 // has no indexed interceptor.
5685 __ CompareObjectType(object, tmp1, tmp2, FIRST_JS_OBJECT_TYPE);
5686 deferred->Branch(lt);
5687 __ ldrb(tmp2, FieldMemOperand(tmp1, Map::kBitFieldOffset));
5688 __ tst(tmp2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask));
5689 deferred->Branch(nz);
5690
Iain Merrick75681382010-08-19 15:07:18 +01005691 // Check the object's elements are in fast case and writable.
Steve Block6ded16b2010-05-10 14:33:55 +01005692 __ ldr(tmp1, FieldMemOperand(object, JSObject::kElementsOffset));
5693 __ ldr(tmp2, FieldMemOperand(tmp1, HeapObject::kMapOffset));
5694 __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
5695 __ cmp(tmp2, ip);
5696 deferred->Branch(ne);
5697
5698 // Smi-tagging is equivalent to multiplying by 2.
5699 STATIC_ASSERT(kSmiTag == 0);
5700 STATIC_ASSERT(kSmiTagSize == 1);
5701
5702 // Check that both indices are smis.
5703 __ mov(tmp2, index1);
5704 __ orr(tmp2, tmp2, index2);
5705 __ tst(tmp2, Operand(kSmiTagMask));
5706 deferred->Branch(nz);
5707
5708 // Bring the offsets into the fixed array in tmp1 into index1 and
5709 // index2.
5710 __ mov(tmp2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
5711 __ add(index1, tmp2, Operand(index1, LSL, kPointerSizeLog2 - kSmiTagSize));
5712 __ add(index2, tmp2, Operand(index2, LSL, kPointerSizeLog2 - kSmiTagSize));
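  // Illustrative arithmetic (editorial): an index of 3 is held as the smi 6;
  // "LSL, kPointerSizeLog2 - kSmiTagSize" shifts it one more bit to 12 =
  // 3 * kPointerSize, which is then biased by the FixedArray header minus
  // the heap-object tag to give a ready-to-use MemOperand offset.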
5713
5714 // Swap elements.
5715 Register tmp3 = object;
5716 object = no_reg;
5717 __ ldr(tmp3, MemOperand(tmp1, index1));
5718 __ ldr(tmp2, MemOperand(tmp1, index2));
5719 __ str(tmp3, MemOperand(tmp1, index2));
5720 __ str(tmp2, MemOperand(tmp1, index1));
5721
5722 Label done;
5723 __ InNewSpace(tmp1, tmp2, eq, &done);
5724 // Possible optimization: do a check that both values are Smis
5725 // (OR them together and test against the smi mask).
5726
5727 __ mov(tmp2, tmp1);
5728 RecordWriteStub recordWrite1(tmp1, index1, tmp3);
5729 __ CallStub(&recordWrite1);
5730
5731 RecordWriteStub recordWrite2(tmp2, index2, tmp3);
5732 __ CallStub(&recordWrite2);
5733
5734 __ bind(&done);
5735
5736 deferred->BindExit();
5737 __ LoadRoot(tmp1, Heap::kUndefinedValueRootIndex);
5738 frame_->EmitPush(tmp1);
5739}
5740
5741
5742void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) {
5743 Comment cmnt(masm_, "[ GenerateCallFunction");
5744
5745 ASSERT(args->length() >= 2);
5746
5747 int n_args = args->length() - 2; // for receiver and function.
5748 Load(args->at(0)); // receiver
5749 for (int i = 0; i < n_args; i++) {
5750 Load(args->at(i + 1));
5751 }
5752 Load(args->at(n_args + 1)); // function
5753 frame_->CallJSFunction(n_args);
Andrei Popescu402d9372010-02-26 13:31:12 +00005754 frame_->EmitPush(r0);
5755}
5756
5757
5758void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) {
5759 ASSERT_EQ(args->length(), 1);
Andrei Popescu402d9372010-02-26 13:31:12 +00005760 Load(args->at(0));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005761 if (CpuFeatures::IsSupported(VFP3)) {
5762 TranscendentalCacheStub stub(TranscendentalCache::SIN);
5763 frame_->SpillAllButCopyTOSToR0();
5764 frame_->CallStub(&stub, 1);
5765 } else {
5766 frame_->CallRuntime(Runtime::kMath_sin, 1);
5767 }
Andrei Popescu402d9372010-02-26 13:31:12 +00005768 frame_->EmitPush(r0);
5769}
5770
5771
5772void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) {
5773 ASSERT_EQ(args->length(), 1);
Andrei Popescu402d9372010-02-26 13:31:12 +00005774 Load(args->at(0));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005775 if (CpuFeatures::IsSupported(VFP3)) {
5776 TranscendentalCacheStub stub(TranscendentalCache::COS);
5777 frame_->SpillAllButCopyTOSToR0();
5778 frame_->CallStub(&stub, 1);
5779 } else {
5780 frame_->CallRuntime(Runtime::kMath_cos, 1);
5781 }
Andrei Popescu402d9372010-02-26 13:31:12 +00005782 frame_->EmitPush(r0);
5783}
5784
5785
Steve Blocka7e24c12009-10-30 11:49:00 +00005786void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005787 ASSERT(args->length() == 2);
5788
5789 // Load the two objects into registers and perform the comparison.
Leon Clarkef7060e22010-06-03 12:02:55 +01005790 Load(args->at(0));
5791 Load(args->at(1));
5792 Register lhs = frame_->PopToRegister();
5793 Register rhs = frame_->PopToRegister(lhs);
5794 __ cmp(lhs, rhs);
Steve Blocka7e24c12009-10-30 11:49:00 +00005795 cc_reg_ = eq;
5796}
5797
5798
Ben Murdochbb769b22010-08-11 14:56:33 +01005799void CodeGenerator::GenerateIsRegExpEquivalent(ZoneList<Expression*>* args) {
5800 ASSERT(args->length() == 2);
5801
5802 // Load the two objects into registers and perform the comparison.
5803 Load(args->at(0));
5804 Load(args->at(1));
5805 Register right = frame_->PopToRegister();
5806 Register left = frame_->PopToRegister(right);
5807 Register tmp = frame_->scratch0();
5808 Register tmp2 = frame_->scratch1();
5809
5810 // Jumps to done must have the eq flag set if the test is successful
5811 // and clear if the test has failed.
5812 Label done;
5813
5814 // Fail if either is a non-HeapObject.
5815 __ cmp(left, Operand(right));
5816 __ b(eq, &done);
5817 __ and_(tmp, left, Operand(right));
5818 __ eor(tmp, tmp, Operand(kSmiTagMask));
5819 __ tst(tmp, Operand(kSmiTagMask));
5820 __ b(ne, &done);
5821 __ ldr(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
5822 __ ldrb(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
5823 __ cmp(tmp2, Operand(JS_REGEXP_TYPE));
5824 __ b(ne, &done);
5825 __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
5826 __ cmp(tmp, Operand(tmp2));
5827 __ b(ne, &done);
5828 __ ldr(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
5829 __ ldr(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
5830 __ cmp(tmp, tmp2);
5831 __ bind(&done);
5832 cc_reg_ = eq;
5833}
5834
5835
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005836void CodeGenerator::GenerateHasCachedArrayIndex(ZoneList<Expression*>* args) {
5837 ASSERT(args->length() == 1);
5838 Load(args->at(0));
5839 Register value = frame_->PopToRegister();
5840 Register tmp = frame_->scratch0();
5841 __ ldr(tmp, FieldMemOperand(value, String::kHashFieldOffset));
5842 __ tst(tmp, Operand(String::kContainsCachedArrayIndexMask));
5843 cc_reg_ = eq;
5844}
5845
5846
5847void CodeGenerator::GenerateGetCachedArrayIndex(ZoneList<Expression*>* args) {
5848 ASSERT(args->length() == 1);
5849 Load(args->at(0));
5850 Register value = frame_->PopToRegister();
5851
5852 __ ldr(value, FieldMemOperand(value, String::kHashFieldOffset));
5853 __ IndexFromHash(value, value);
5854 frame_->EmitPush(value);
5855}
5856
Ben Murdochbb769b22010-08-11 14:56:33 +01005857
Steve Blocka7e24c12009-10-30 11:49:00 +00005858void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
5859#ifdef DEBUG
5860 int original_height = frame_->height();
5861#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00005862 if (CheckForInlineRuntimeCall(node)) {
5863 ASSERT((has_cc() && frame_->height() == original_height) ||
5864 (!has_cc() && frame_->height() == original_height + 1));
5865 return;
5866 }
5867
5868 ZoneList<Expression*>* args = node->arguments();
5869 Comment cmnt(masm_, "[ CallRuntime");
5870 Runtime::Function* function = node->function();
5871
5872 if (function == NULL) {
5873 // Prepare stack for calling JS runtime function.
Steve Blocka7e24c12009-10-30 11:49:00 +00005874 // Push the builtins object found in the current global object.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005875 Register scratch = VirtualFrame::scratch0();
5876 __ ldr(scratch, GlobalObject());
5877 Register builtins = frame_->GetTOSRegister();
5878 __ ldr(builtins, FieldMemOperand(scratch, GlobalObject::kBuiltinsOffset));
5879 frame_->EmitPush(builtins);
Steve Blocka7e24c12009-10-30 11:49:00 +00005880 }
5881
5882 // Push the arguments ("left-to-right").
5883 int arg_count = args->length();
5884 for (int i = 0; i < arg_count; i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005885 Load(args->at(i));
Steve Blocka7e24c12009-10-30 11:49:00 +00005886 }
5887
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005888 VirtualFrame::SpilledScope spilled_scope(frame_);
5889
Steve Blocka7e24c12009-10-30 11:49:00 +00005890 if (function == NULL) {
5891 // Call the JS runtime function.
Andrei Popescu402d9372010-02-26 13:31:12 +00005892 __ mov(r2, Operand(node->name()));
Steve Blocka7e24c12009-10-30 11:49:00 +00005893 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
5894 Handle<Code> stub = ComputeCallInitialize(arg_count, in_loop);
5895 frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
5896 __ ldr(cp, frame_->Context());
Steve Blocka7e24c12009-10-30 11:49:00 +00005897 frame_->EmitPush(r0);
5898 } else {
5899 // Call the C runtime function.
5900 frame_->CallRuntime(function, arg_count);
5901 frame_->EmitPush(r0);
5902 }
Steve Block6ded16b2010-05-10 14:33:55 +01005903 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00005904}
5905
5906
5907void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
5908#ifdef DEBUG
5909 int original_height = frame_->height();
5910#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00005911 Comment cmnt(masm_, "[ UnaryOperation");
5912
5913 Token::Value op = node->op();
5914
5915 if (op == Token::NOT) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005916 LoadCondition(node->expression(), false_target(), true_target(), true);
Steve Blocka7e24c12009-10-30 11:49:00 +00005917 // LoadCondition may (and usually does) leave a test and branch to
5918 // be emitted by the caller. In that case, negate the condition.
5919 if (has_cc()) cc_reg_ = NegateCondition(cc_reg_);
5920
5921 } else if (op == Token::DELETE) {
5922 Property* property = node->expression()->AsProperty();
5923 Variable* variable = node->expression()->AsVariableProxy()->AsVariable();
5924 if (property != NULL) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005925 Load(property->obj());
5926 Load(property->key());
Steve Blockd0582a62009-12-15 09:54:21 +00005927 frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, 2);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005928 frame_->EmitPush(r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00005929
5930 } else if (variable != NULL) {
5931 Slot* slot = variable->slot();
5932 if (variable->is_global()) {
5933 LoadGlobal();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005934 frame_->EmitPush(Operand(variable->name()));
Steve Blockd0582a62009-12-15 09:54:21 +00005935 frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, 2);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005936 frame_->EmitPush(r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00005937
5938 } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
5939 // Look up the context holding the named variable.
5940 frame_->EmitPush(cp);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005941 frame_->EmitPush(Operand(variable->name()));
Steve Blocka7e24c12009-10-30 11:49:00 +00005942 frame_->CallRuntime(Runtime::kLookupContext, 2);
5943 // r0: context
5944 frame_->EmitPush(r0);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005945 frame_->EmitPush(Operand(variable->name()));
Steve Blockd0582a62009-12-15 09:54:21 +00005946 frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, 2);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005947 frame_->EmitPush(r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00005948
5949 } else {
5950 // Default: Result of deleting non-global, not dynamically
5951 // introduced variables is false.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005952 frame_->EmitPushRoot(Heap::kFalseValueRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +00005953 }
5954
5955 } else {
5956 // Default: Result of deleting expressions is true.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005957 Load(node->expression()); // may have side-effects
Steve Blocka7e24c12009-10-30 11:49:00 +00005958 frame_->Drop();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005959 frame_->EmitPushRoot(Heap::kTrueValueRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +00005960 }
Steve Blocka7e24c12009-10-30 11:49:00 +00005961
5962 } else if (op == Token::TYPEOF) {
5963 // Special case for loading the typeof expression; see comment on
5964 // LoadTypeofExpression().
5965 LoadTypeofExpression(node->expression());
5966 frame_->CallRuntime(Runtime::kTypeof, 1);
5967 frame_->EmitPush(r0); // r0 has result
5968
5969 } else {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005970 bool can_overwrite = node->expression()->ResultOverwriteAllowed();
Leon Clarkeac952652010-07-15 11:15:24 +01005971 UnaryOverwriteMode overwrite =
5972 can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
5973
5974 bool no_negative_zero = node->expression()->no_negative_zero();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005975 Load(node->expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00005976 switch (op) {
5977 case Token::NOT:
5978 case Token::DELETE:
5979 case Token::TYPEOF:
5980 UNREACHABLE(); // handled above
5981 break;
5982
5983 case Token::SUB: {
Steve Block8defd9f2010-07-08 12:39:36 +01005984 frame_->PopToR0();
Leon Clarkeac952652010-07-15 11:15:24 +01005985 GenericUnaryOpStub stub(
5986 Token::SUB,
5987 overwrite,
5988 no_negative_zero ? kIgnoreNegativeZero : kStrictNegativeZero);
Steve Blocka7e24c12009-10-30 11:49:00 +00005989 frame_->CallStub(&stub, 0);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005990 frame_->EmitPush(r0); // r0 has result
Steve Blocka7e24c12009-10-30 11:49:00 +00005991 break;
5992 }
5993
5994 case Token::BIT_NOT: {
Steve Block8defd9f2010-07-08 12:39:36 +01005995 Register tos = frame_->PopToRegister();
5996 JumpTarget not_smi_label;
Steve Blocka7e24c12009-10-30 11:49:00 +00005997 JumpTarget continue_label;
Steve Block8defd9f2010-07-08 12:39:36 +01005998 // Smi check.
5999 __ tst(tos, Operand(kSmiTagMask));
6000 not_smi_label.Branch(ne);
Steve Blocka7e24c12009-10-30 11:49:00 +00006001
Steve Block8defd9f2010-07-08 12:39:36 +01006002 __ mvn(tos, Operand(tos));
6003 __ bic(tos, tos, Operand(kSmiTagMask)); // Bit-clear inverted smi-tag.
6004 frame_->EmitPush(tos);
6005 // The fast case is the first to jump to the continue label, so it gets
6006 // to decide the virtual frame layout.
Steve Blocka7e24c12009-10-30 11:49:00 +00006007 continue_label.Jump();
Leon Clarke4515c472010-02-03 11:58:03 +00006008
Steve Block8defd9f2010-07-08 12:39:36 +01006009 not_smi_label.Bind();
6010 frame_->SpillAll();
6011 __ Move(r0, tos);
6012 GenericUnaryOpStub stub(Token::BIT_NOT, overwrite);
6013 frame_->CallStub(&stub, 0);
6014 frame_->EmitPush(r0);
6015
Steve Blocka7e24c12009-10-30 11:49:00 +00006016 continue_label.Bind();
6017 break;
6018 }
6019
6020 case Token::VOID:
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006021 frame_->Drop();
6022 frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +00006023 break;
6024
6025 case Token::ADD: {
Steve Block8defd9f2010-07-08 12:39:36 +01006026 Register tos = frame_->Peek();
Steve Blocka7e24c12009-10-30 11:49:00 +00006027 // Smi check.
6028 JumpTarget continue_label;
Steve Block8defd9f2010-07-08 12:39:36 +01006029 __ tst(tos, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00006030 continue_label.Branch(eq);
Steve Block8defd9f2010-07-08 12:39:36 +01006031
Steve Blockd0582a62009-12-15 09:54:21 +00006032 frame_->InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS, 1);
Steve Block8defd9f2010-07-08 12:39:36 +01006033 frame_->EmitPush(r0);
6034
Steve Blocka7e24c12009-10-30 11:49:00 +00006035 continue_label.Bind();
6036 break;
6037 }
6038 default:
6039 UNREACHABLE();
6040 }
Steve Blocka7e24c12009-10-30 11:49:00 +00006041 }
6042 ASSERT(!has_valid_frame() ||
6043 (has_cc() && frame_->height() == original_height) ||
6044 (!has_cc() && frame_->height() == original_height + 1));
6045}
6046
6047
6048void CodeGenerator::VisitCountOperation(CountOperation* node) {
6049#ifdef DEBUG
6050 int original_height = frame_->height();
6051#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00006052 Comment cmnt(masm_, "[ CountOperation");
Steve Block8defd9f2010-07-08 12:39:36 +01006053 VirtualFrame::RegisterAllocationScope scope(this);
Steve Blocka7e24c12009-10-30 11:49:00 +00006054
6055 bool is_postfix = node->is_postfix();
6056 bool is_increment = node->op() == Token::INC;
6057
6058 Variable* var = node->expression()->AsVariableProxy()->AsVariable();
6059 bool is_const = (var != NULL && var->mode() == Variable::CONST);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006060 bool is_slot = (var != NULL && var->mode() == Variable::VAR);
Steve Blocka7e24c12009-10-30 11:49:00 +00006061
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006062 if (!is_const && is_slot && type_info(var->slot()).IsSmi()) {
6063 // The type info declares that this variable is always a Smi. That
6064 // means it is a Smi both before and after the increment/decrement.
6065 // Let's make use of that to emit a very minimal count.
6066 Reference target(this, node->expression(), !is_const);
6067 ASSERT(!target.is_illegal());
6068 target.GetValue(); // Pushes the value.
6069 Register value = frame_->PopToRegister();
6070 if (is_postfix) frame_->EmitPush(value);
6071 if (is_increment) {
6072 __ add(value, value, Operand(Smi::FromInt(1)));
6073 } else {
6074 __ sub(value, value, Operand(Smi::FromInt(1)));
6075 }
6076 frame_->EmitPush(value);
Steve Block8defd9f2010-07-08 12:39:36 +01006077 target.SetValue(NOT_CONST_INIT, LIKELY_SMI);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006078 if (is_postfix) frame_->Pop();
6079 ASSERT_EQ(original_height + 1, frame_->height());
6080 return;
6081 }
6082
6083 // If it's a postfix expression and its result is not ignored and the
6084 // reference is non-trivial, then push a placeholder on the stack now
6085 // to hold the result of the expression.
6086 bool placeholder_pushed = false;
6087 if (!is_slot && is_postfix) {
Kristian Monsen25f61362010-05-21 11:50:48 +01006088 frame_->EmitPush(Operand(Smi::FromInt(0)));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006089 placeholder_pushed = true;
Steve Blocka7e24c12009-10-30 11:49:00 +00006090 }
6091
Leon Clarked91b9f72010-01-27 17:25:45 +00006092 // A constant reference is never written to, so it is not a
6093 // compound assignment reference.
6094 { Reference target(this, node->expression(), !is_const);
Steve Blocka7e24c12009-10-30 11:49:00 +00006095 if (target.is_illegal()) {
6096 // Spoof the virtual frame to have the expected height (one higher
6097 // than on entry).
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006098 if (!placeholder_pushed) frame_->EmitPush(Operand(Smi::FromInt(0)));
Steve Block6ded16b2010-05-10 14:33:55 +01006099 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00006100 return;
6101 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006102
Kristian Monsen25f61362010-05-21 11:50:48 +01006103 // This pushes 0, 1 or 2 words onto the stack to be used later when updating
6104 // the target. It also pushes the current value of the target.
Steve Block6ded16b2010-05-10 14:33:55 +01006105 target.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00006106
6107 JumpTarget slow;
6108 JumpTarget exit;
6109
Kristian Monsen25f61362010-05-21 11:50:48 +01006110 Register value = frame_->PopToRegister();
Steve Blocka7e24c12009-10-30 11:49:00 +00006111
6112 // Postfix: Store the old value as the result.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006113 if (placeholder_pushed) {
Kristian Monsen25f61362010-05-21 11:50:48 +01006114 frame_->SetElementAt(value, target.size());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006115 } else if (is_postfix) {
6116 frame_->EmitPush(value);
6117 __ mov(VirtualFrame::scratch0(), value);
6118 value = VirtualFrame::scratch0();
Steve Blocka7e24c12009-10-30 11:49:00 +00006119 }
6120
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006121 // Check for smi operand.
6122 __ tst(value, Operand(kSmiTagMask));
6123 slow.Branch(ne);
6124
Steve Blocka7e24c12009-10-30 11:49:00 +00006125 // Perform optimistic increment/decrement.
6126 if (is_increment) {
Kristian Monsen25f61362010-05-21 11:50:48 +01006127 __ add(value, value, Operand(Smi::FromInt(1)), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00006128 } else {
Kristian Monsen25f61362010-05-21 11:50:48 +01006129 __ sub(value, value, Operand(Smi::FromInt(1)), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00006130 }
6131
6132 // If the increment/decrement didn't overflow, we're done.
6133 exit.Branch(vc);
6134
6135 // Revert optimistic increment/decrement.
6136 if (is_increment) {
Kristian Monsen25f61362010-05-21 11:50:48 +01006137 __ sub(value, value, Operand(Smi::FromInt(1)));
Steve Blocka7e24c12009-10-30 11:49:00 +00006138 } else {
Kristian Monsen25f61362010-05-21 11:50:48 +01006139 __ add(value, value, Operand(Smi::FromInt(1)));
Steve Blocka7e24c12009-10-30 11:49:00 +00006140 }
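    // Editorial worked example: Smi::FromInt(1) is the raw word 2, so the
    // tagged add/sub above adjusts the smi directly. For the largest smi,
    // 2^30 - 1 (raw 0x7ffffffe), adding 2 yields 0x80000000 and sets the V
    // flag, so the "vc" exit is not taken and the revert code runs before
    // falling through to the slow case.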
6141
Kristian Monsen25f61362010-05-21 11:50:48 +01006142 // Slow case: Convert to number. At this point the
6143 // value to be incremented is in the value register.
Steve Blocka7e24c12009-10-30 11:49:00 +00006144 slow.Bind();
Kristian Monsen25f61362010-05-21 11:50:48 +01006145
6146 // Convert the operand to a number.
6147 frame_->EmitPush(value);
6148
Steve Blocka7e24c12009-10-30 11:49:00 +00006149 {
Kristian Monsen25f61362010-05-21 11:50:48 +01006150 VirtualFrame::SpilledScope spilled(frame_);
Steve Blockd0582a62009-12-15 09:54:21 +00006151 frame_->InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS, 1);
Kristian Monsen25f61362010-05-21 11:50:48 +01006152
6153 if (is_postfix) {
6154 // Postfix: store to result (on the stack).
6155 __ str(r0, frame_->ElementAt(target.size()));
6156 }
6157
6158 // Compute the new value.
6159 frame_->EmitPush(r0);
6160 frame_->EmitPush(Operand(Smi::FromInt(1)));
6161 if (is_increment) {
6162 frame_->CallRuntime(Runtime::kNumberAdd, 2);
6163 } else {
6164 frame_->CallRuntime(Runtime::kNumberSub, 2);
6165 }
Steve Blocka7e24c12009-10-30 11:49:00 +00006166 }
6167
Kristian Monsen25f61362010-05-21 11:50:48 +01006168 __ Move(value, r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00006169 // Store the new value in the target if not const.
Kristian Monsen25f61362010-05-21 11:50:48 +01006170 // At this point the answer is in the value register.
Steve Blocka7e24c12009-10-30 11:49:00 +00006171 exit.Bind();
Kristian Monsen25f61362010-05-21 11:50:48 +01006172 frame_->EmitPush(value);
6173 // Set the target with the result, leaving the result on
6174 // top of the stack. Removes the target from the stack if
6175 // it has a non-zero size.
Steve Block8defd9f2010-07-08 12:39:36 +01006176 if (!is_const) target.SetValue(NOT_CONST_INIT, LIKELY_SMI);
Steve Blocka7e24c12009-10-30 11:49:00 +00006177 }
6178
6179 // Postfix: Discard the new value and use the old.
Kristian Monsen25f61362010-05-21 11:50:48 +01006180 if (is_postfix) frame_->Pop();
Steve Block6ded16b2010-05-10 14:33:55 +01006181 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00006182}
6183
6184
Steve Block6ded16b2010-05-10 14:33:55 +01006185void CodeGenerator::GenerateLogicalBooleanOperation(BinaryOperation* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00006186 // According to ECMA-262 section 11.11, page 58, the binary logical
6187 // operators must yield the result of one of the two expressions
6188 // before any ToBoolean() conversions. This means that the value
6189 // produced by a && or || operator is not necessarily a boolean.
6190
6191 // NOTE: If the left hand side produces a materialized value (not in
6192 // the CC register), we force the right hand side to do the
6193 // same. This is necessary because we may have to branch to the exit
6194 // after evaluating the left hand side (due to the shortcut
6195 // semantics), but the compiler must (statically) know if the result
6196 // of compiling the binary operation is materialized or not.
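 // For example (editorial): ("" && f()) evaluates to "" without calling f,
 // and (0 || "x") evaluates to "x" -- the operators yield an operand value,
 // not a boolean.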
Steve Block6ded16b2010-05-10 14:33:55 +01006197 if (node->op() == Token::AND) {
Steve Blocka7e24c12009-10-30 11:49:00 +00006198 JumpTarget is_true;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006199 LoadCondition(node->left(), &is_true, false_target(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00006200 if (has_valid_frame() && !has_cc()) {
6201 // The left-hand side result is on top of the virtual frame.
6202 JumpTarget pop_and_continue;
6203 JumpTarget exit;
6204
Leon Clarkef7060e22010-06-03 12:02:55 +01006205 frame_->Dup();
Steve Blocka7e24c12009-10-30 11:49:00 +00006206 // Avoid popping the result if it converts to 'false' using the
6207 // standard ToBoolean() conversion as described in ECMA-262,
6208 // section 9.2, page 30.
6209 ToBoolean(&pop_and_continue, &exit);
6210 Branch(false, &exit);
6211
6212 // Pop the result of evaluating the first part.
6213 pop_and_continue.Bind();
Leon Clarkef7060e22010-06-03 12:02:55 +01006214 frame_->Pop();
Steve Blocka7e24c12009-10-30 11:49:00 +00006215
6216 // Evaluate right side expression.
6217 is_true.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006218 Load(node->right());
Steve Blocka7e24c12009-10-30 11:49:00 +00006219
6220 // Exit (always with a materialized value).
6221 exit.Bind();
6222 } else if (has_cc() || is_true.is_linked()) {
6223 // The left-hand side is either (a) partially compiled to
6224 // control flow with a final branch left to emit or (b) fully
6225 // compiled to control flow and possibly true.
6226 if (has_cc()) {
6227 Branch(false, false_target());
6228 }
6229 is_true.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006230 LoadCondition(node->right(), true_target(), false_target(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00006231 } else {
6232 // Nothing to do.
6233 ASSERT(!has_valid_frame() && !has_cc() && !is_true.is_linked());
6234 }
6235
Steve Block6ded16b2010-05-10 14:33:55 +01006236 } else {
6237 ASSERT(node->op() == Token::OR);
Steve Blocka7e24c12009-10-30 11:49:00 +00006238 JumpTarget is_false;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006239 LoadCondition(node->left(), true_target(), &is_false, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00006240 if (has_valid_frame() && !has_cc()) {
6241 // The left-hand side result is on top of the virtual frame.
6242 JumpTarget pop_and_continue;
6243 JumpTarget exit;
6244
Leon Clarkef7060e22010-06-03 12:02:55 +01006245 frame_->Dup();
Steve Blocka7e24c12009-10-30 11:49:00 +00006246 // Avoid popping the result if it converts to 'true' using the
6247 // standard ToBoolean() conversion as described in ECMA-262,
6248 // section 9.2, page 30.
6249 ToBoolean(&exit, &pop_and_continue);
6250 Branch(true, &exit);
6251
6252 // Pop the result of evaluating the first part.
6253 pop_and_continue.Bind();
Leon Clarkef7060e22010-06-03 12:02:55 +01006254 frame_->Pop();
Steve Blocka7e24c12009-10-30 11:49:00 +00006255
6256 // Evaluate right side expression.
6257 is_false.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006258 Load(node->right());
Steve Blocka7e24c12009-10-30 11:49:00 +00006259
6260 // Exit (always with a materialized value).
6261 exit.Bind();
6262 } else if (has_cc() || is_false.is_linked()) {
6263 // The left-hand side is either (a) partially compiled to
6264 // control flow with a final branch left to emit or (b) fully
6265 // compiled to control flow and possibly false.
6266 if (has_cc()) {
6267 Branch(true, true_target());
6268 }
6269 is_false.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006270 LoadCondition(node->right(), true_target(), false_target(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00006271 } else {
6272 // Nothing to do.
6273 ASSERT(!has_valid_frame() && !has_cc() && !is_false.is_linked());
6274 }
Steve Block6ded16b2010-05-10 14:33:55 +01006275 }
6276}
Steve Blocka7e24c12009-10-30 11:49:00 +00006277
Steve Block6ded16b2010-05-10 14:33:55 +01006278
6279void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
6280#ifdef DEBUG
6281 int original_height = frame_->height();
6282#endif
6283 Comment cmnt(masm_, "[ BinaryOperation");
6284
6285 if (node->op() == Token::AND || node->op() == Token::OR) {
Steve Block6ded16b2010-05-10 14:33:55 +01006286 GenerateLogicalBooleanOperation(node);
Steve Blocka7e24c12009-10-30 11:49:00 +00006287 } else {
6288 // Optimize for the case where (at least) one of the expressions
6289 // is a literal small integer.
6290 Literal* lliteral = node->left()->AsLiteral();
6291 Literal* rliteral = node->right()->AsLiteral();
6292 // NOTE: The code below assumes that the slow cases (calls to runtime)
6293 // never return a constant/immutable object.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01006294 bool overwrite_left = node->left()->ResultOverwriteAllowed();
6295 bool overwrite_right = node->right()->ResultOverwriteAllowed();
Steve Blocka7e24c12009-10-30 11:49:00 +00006296
6297 if (rliteral != NULL && rliteral->handle()->IsSmi()) {
Steve Block6ded16b2010-05-10 14:33:55 +01006298 VirtualFrame::RegisterAllocationScope scope(this);
6299 Load(node->left());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006300 if (frame_->KnownSmiAt(0)) overwrite_left = false;
Steve Blocka7e24c12009-10-30 11:49:00 +00006301 SmiOperation(node->op(),
6302 rliteral->handle(),
6303 false,
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006304 overwrite_left ? OVERWRITE_LEFT : NO_OVERWRITE);
Steve Blocka7e24c12009-10-30 11:49:00 +00006305 } else if (lliteral != NULL && lliteral->handle()->IsSmi()) {
Steve Block6ded16b2010-05-10 14:33:55 +01006306 VirtualFrame::RegisterAllocationScope scope(this);
6307 Load(node->right());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006308 if (frame_->KnownSmiAt(0)) overwrite_right = false;
Steve Blocka7e24c12009-10-30 11:49:00 +00006309 SmiOperation(node->op(),
6310 lliteral->handle(),
6311 true,
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006312 overwrite_right ? OVERWRITE_RIGHT : NO_OVERWRITE);
Steve Blocka7e24c12009-10-30 11:49:00 +00006313 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006314 GenerateInlineSmi inline_smi =
6315 loop_nesting() > 0 ? GENERATE_INLINE_SMI : DONT_GENERATE_INLINE_SMI;
6316 if (lliteral != NULL) {
6317 ASSERT(!lliteral->handle()->IsSmi());
6318 inline_smi = DONT_GENERATE_INLINE_SMI;
6319 }
6320 if (rliteral != NULL) {
6321 ASSERT(!rliteral->handle()->IsSmi());
6322 inline_smi = DONT_GENERATE_INLINE_SMI;
6323 }
Steve Block6ded16b2010-05-10 14:33:55 +01006324 VirtualFrame::RegisterAllocationScope scope(this);
Steve Blocka7e24c12009-10-30 11:49:00 +00006325 OverwriteMode overwrite_mode = NO_OVERWRITE;
6326 if (overwrite_left) {
6327 overwrite_mode = OVERWRITE_LEFT;
6328 } else if (overwrite_right) {
6329 overwrite_mode = OVERWRITE_RIGHT;
6330 }
Steve Block6ded16b2010-05-10 14:33:55 +01006331 Load(node->left());
6332 Load(node->right());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006333 GenericBinaryOperation(node->op(), overwrite_mode, inline_smi);
Steve Blocka7e24c12009-10-30 11:49:00 +00006334 }
Steve Blocka7e24c12009-10-30 11:49:00 +00006335 }
6336 ASSERT(!has_valid_frame() ||
6337 (has_cc() && frame_->height() == original_height) ||
6338 (!has_cc() && frame_->height() == original_height + 1));
6339}
6340
6341
6342void CodeGenerator::VisitThisFunction(ThisFunction* node) {
6343#ifdef DEBUG
6344 int original_height = frame_->height();
6345#endif
Leon Clarkef7060e22010-06-03 12:02:55 +01006346 frame_->EmitPush(MemOperand(frame_->Function()));
Steve Block6ded16b2010-05-10 14:33:55 +01006347 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00006348}
6349
6350
6351void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
6352#ifdef DEBUG
6353 int original_height = frame_->height();
6354#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00006355 Comment cmnt(masm_, "[ CompareOperation");
6356
Steve Block6ded16b2010-05-10 14:33:55 +01006357 VirtualFrame::RegisterAllocationScope nonspilled_scope(this);
6358
Steve Blocka7e24c12009-10-30 11:49:00 +00006359 // Get the expressions from the node.
6360 Expression* left = node->left();
6361 Expression* right = node->right();
6362 Token::Value op = node->op();
6363
Steve Blocka7e24c12009-10-30 11:49:00 +00006364 // To make typeof testing for natives implemented in JavaScript really
6365 // efficient, we generate special code for expressions of the form:
6366 // 'typeof <expression> == <string>'.
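 // For example (editorial), (typeof x == 'number') compiles to the inline
 // smi/heap-number checks below; the expression's type-name string is never
 // materialized and no runtime call is made on these paths.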
6367 UnaryOperation* operation = left->AsUnaryOperation();
6368 if ((op == Token::EQ || op == Token::EQ_STRICT) &&
6369 (operation != NULL && operation->op() == Token::TYPEOF) &&
6370 (right->AsLiteral() != NULL &&
6371 right->AsLiteral()->handle()->IsString())) {
6372 Handle<String> check(String::cast(*right->AsLiteral()->handle()));
6373
Steve Block6ded16b2010-05-10 14:33:55 +01006374 // Load the operand, move it to a register.
    LoadTypeofExpression(operation->expression());
    Register tos = frame_->PopToRegister();

    Register scratch = VirtualFrame::scratch0();

    if (check->Equals(Heap::number_symbol())) {
      __ tst(tos, Operand(kSmiTagMask));
      true_target()->Branch(eq);
      __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
      __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
      __ cmp(tos, ip);
      cc_reg_ = eq;

    } else if (check->Equals(Heap::string_symbol())) {
      __ tst(tos, Operand(kSmiTagMask));
      false_target()->Branch(eq);

      __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));

      // It can be an undetectable string object.
      __ ldrb(scratch, FieldMemOperand(tos, Map::kBitFieldOffset));
      __ and_(scratch, scratch, Operand(1 << Map::kIsUndetectable));
      __ cmp(scratch, Operand(1 << Map::kIsUndetectable));
      false_target()->Branch(eq);

      __ ldrb(scratch, FieldMemOperand(tos, Map::kInstanceTypeOffset));
      __ cmp(scratch, Operand(FIRST_NONSTRING_TYPE));
      cc_reg_ = lt;

    } else if (check->Equals(Heap::boolean_symbol())) {
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(tos, ip);
      true_target()->Branch(eq);
      __ LoadRoot(ip, Heap::kFalseValueRootIndex);
      __ cmp(tos, ip);
      cc_reg_ = eq;

    } else if (check->Equals(Heap::undefined_symbol())) {
      __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
      __ cmp(tos, ip);
      true_target()->Branch(eq);

      __ tst(tos, Operand(kSmiTagMask));
      false_target()->Branch(eq);

      // It can be an undetectable object.
      __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
      __ ldrb(scratch, FieldMemOperand(tos, Map::kBitFieldOffset));
      __ and_(scratch, scratch, Operand(1 << Map::kIsUndetectable));
      __ cmp(scratch, Operand(1 << Map::kIsUndetectable));

      cc_reg_ = eq;

    } else if (check->Equals(Heap::function_symbol())) {
      __ tst(tos, Operand(kSmiTagMask));
      false_target()->Branch(eq);
      Register map_reg = scratch;
      __ CompareObjectType(tos, map_reg, tos, JS_FUNCTION_TYPE);
      true_target()->Branch(eq);
      // Regular expressions are callable so typeof == 'function'.
      __ CompareInstanceType(map_reg, tos, JS_REGEXP_TYPE);
      cc_reg_ = eq;

    } else if (check->Equals(Heap::object_symbol())) {
      __ tst(tos, Operand(kSmiTagMask));
      false_target()->Branch(eq);

      __ LoadRoot(ip, Heap::kNullValueRootIndex);
      __ cmp(tos, ip);
      true_target()->Branch(eq);

      Register map_reg = scratch;
      __ CompareObjectType(tos, map_reg, tos, JS_REGEXP_TYPE);
      false_target()->Branch(eq);

      // It can be an undetectable object.
      __ ldrb(tos, FieldMemOperand(map_reg, Map::kBitFieldOffset));
      __ and_(tos, tos, Operand(1 << Map::kIsUndetectable));
      __ cmp(tos, Operand(1 << Map::kIsUndetectable));
      false_target()->Branch(eq);

      __ ldrb(tos, FieldMemOperand(map_reg, Map::kInstanceTypeOffset));
      __ cmp(tos, Operand(FIRST_JS_OBJECT_TYPE));
      false_target()->Branch(lt);
      __ cmp(tos, Operand(LAST_JS_OBJECT_TYPE));
      cc_reg_ = le;

    } else {
      // Uncommon case: typeof testing against a string literal that is
      // never returned from the typeof operator.
      false_target()->Jump();
    }
    ASSERT(!has_valid_frame() ||
           (has_cc() && frame_->height() == original_height));
    return;
  }

  switch (op) {
    case Token::EQ:
      Comparison(eq, left, right, false);
      break;

    case Token::LT:
      Comparison(lt, left, right);
      break;

    case Token::GT:
      Comparison(gt, left, right);
      break;

    case Token::LTE:
      Comparison(le, left, right);
      break;

    case Token::GTE:
      Comparison(ge, left, right);
      break;

    case Token::EQ_STRICT:
      Comparison(eq, left, right, true);
      break;

    case Token::IN: {
      Load(left);
      Load(right);
      frame_->InvokeBuiltin(Builtins::IN, CALL_JS, 2);
      frame_->EmitPush(r0);
      break;
    }

    case Token::INSTANCEOF: {
      Load(left);
      Load(right);
      InstanceofStub stub;
      frame_->CallStub(&stub, 2);
      // At this point if instanceof succeeded then r0 == 0.
      __ tst(r0, Operand(r0));
      cc_reg_ = eq;
      break;
    }

    default:
      UNREACHABLE();
  }
  ASSERT((has_cc() && frame_->height() == original_height) ||
         (!has_cc() && frame_->height() == original_height + 1));
}
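
// For reference, the fast path generated above for "typeof x == 'number'"
// is roughly (a sketch; actual register names depend on allocation):
//   tst tos, #kSmiTagMask        ; smis are tagged with a 0 low bit
//   beq <true_target>            ; any smi is a number
//   ldr tos, [tos, #map offset]  ; otherwise inspect the map
//   cmp tos, <heap number map>
// leaving cc_reg_ == eq for the caller to branch on.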


void CodeGenerator::VisitCompareToNull(CompareToNull* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ CompareToNull");

  Load(node->expression());
  Register tos = frame_->PopToRegister();
  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(tos, ip);

  // The 'null' value is only equal to 'undefined' if using non-strict
  // comparisons.
  if (!node->is_strict()) {
    true_target()->Branch(eq);
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(tos, Operand(ip));
    true_target()->Branch(eq);

    __ tst(tos, Operand(kSmiTagMask));
    false_target()->Branch(eq);

    // It can be an undetectable object.
    __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
    __ ldrb(tos, FieldMemOperand(tos, Map::kBitFieldOffset));
    __ and_(tos, tos, Operand(1 << Map::kIsUndetectable));
    __ cmp(tos, Operand(1 << Map::kIsUndetectable));
  }

  cc_reg_ = eq;
  ASSERT(has_cc() && frame_->height() == original_height);
}
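
// This implements the JavaScript rule that 'x == null' is true for null,
// undefined and undetectable objects (such as document.all), while the
// strict form 'x === null' only matches null itself.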


class DeferredReferenceGetNamedValue: public DeferredCode {
 public:
  explicit DeferredReferenceGetNamedValue(Register receiver,
                                          Handle<String> name)
      : receiver_(receiver), name_(name) {
    set_comment("[ DeferredReferenceGetNamedValue");
  }

  virtual void Generate();

 private:
  Register receiver_;
  Handle<String> name_;
};


// The convention is that on entry the receiver is in a register that is
// not used by the stack. On exit the answer is in that same register and
// the stack has the same height.
void DeferredReferenceGetNamedValue::Generate() {
#ifdef DEBUG
  int expected_height = frame_state()->frame()->height();
#endif
  VirtualFrame copied_frame(*frame_state()->frame());
  copied_frame.SpillAll();

  Register scratch1 = VirtualFrame::scratch0();
  Register scratch2 = VirtualFrame::scratch1();
  ASSERT(!receiver_.is(scratch1) && !receiver_.is(scratch2));
  __ DecrementCounter(&Counters::named_load_inline, 1, scratch1, scratch2);
  __ IncrementCounter(&Counters::named_load_inline_miss, 1, scratch1, scratch2);

  // Ensure receiver in r0 and name in r2 to match load ic calling convention.
  __ Move(r0, receiver_);
  __ mov(r2, Operand(name_));

  // The rest of the instructions in the deferred code must be together.
  { Assembler::BlockConstPoolScope block_const_pool(masm_);
    Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
    __ Call(ic, RelocInfo::CODE_TARGET);
    // The call must be followed by a nop(1) instruction to indicate that
    // the in-object load has been inlined.
    __ nop(PROPERTY_ACCESS_INLINED);

    // At this point the answer is in r0. We move it to the expected register
    // if necessary.
    __ Move(receiver_, r0);

    // Now go back to the frame that we entered with. This will not overwrite
    // the receiver register since that register was not in use when we came
    // in. The instructions emitted by this merge are skipped over by the
    // inline load patching mechanism when looking for the branch instruction
    // that tells it where the code to patch is.
    copied_frame.MergeTo(frame_state()->frame());

    // Block the constant pool for one more instruction after leaving this
    // constant pool block scope to include the branch instruction ending the
    // deferred code.
    __ BlockConstPoolFor(1);
  }
  ASSERT_EQ(expected_height, frame_state()->frame()->height());
}
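
// The nop(PROPERTY_ACCESS_INLINED) marker emitted after the IC call above
// is what the IC system scans for when it patches the inlined map check
// and load offset at the use site (see the patching code in ic-arm.cc).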


class DeferredReferenceGetKeyedValue: public DeferredCode {
 public:
  DeferredReferenceGetKeyedValue(Register key, Register receiver)
      : key_(key), receiver_(receiver) {
    set_comment("[ DeferredReferenceGetKeyedValue");
  }

  virtual void Generate();

 private:
  Register key_;
  Register receiver_;
};


// Takes the key and receiver in r0 and r1 or vice versa. Returns the
// result in r0.
void DeferredReferenceGetKeyedValue::Generate() {
  ASSERT((key_.is(r0) && receiver_.is(r1)) ||
         (key_.is(r1) && receiver_.is(r0)));

  VirtualFrame copied_frame(*frame_state()->frame());
  copied_frame.SpillAll();

  Register scratch1 = VirtualFrame::scratch0();
  Register scratch2 = VirtualFrame::scratch1();
  __ DecrementCounter(&Counters::keyed_load_inline, 1, scratch1, scratch2);
  __ IncrementCounter(&Counters::keyed_load_inline_miss, 1, scratch1, scratch2);

  // Ensure key in r0 and receiver in r1 to match keyed load ic calling
  // convention.
  if (key_.is(r1)) {
    __ Swap(r0, r1, ip);
  }

  // The rest of the instructions in the deferred code must be together.
  { Assembler::BlockConstPoolScope block_const_pool(masm_);
    // Call keyed load IC. It has the arguments key and receiver in r0 and r1.
    Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
    __ Call(ic, RelocInfo::CODE_TARGET);
    // The call must be followed by a nop instruction to indicate that the
    // keyed load has been inlined.
    __ nop(PROPERTY_ACCESS_INLINED);

    // Now go back to the frame that we entered with. This will not overwrite
    // the receiver or key registers since they were not in use when we came
    // in. The instructions emitted by this merge are skipped over by the
    // inline load patching mechanism when looking for the branch instruction
    // that tells it where the code to patch is.
    copied_frame.MergeTo(frame_state()->frame());

    // Block the constant pool for one more instruction after leaving this
    // constant pool block scope to include the branch instruction ending the
    // deferred code.
    __ BlockConstPoolFor(1);
  }
}


class DeferredReferenceSetKeyedValue: public DeferredCode {
 public:
  DeferredReferenceSetKeyedValue(Register value,
                                 Register key,
                                 Register receiver)
      : value_(value), key_(key), receiver_(receiver) {
    set_comment("[ DeferredReferenceSetKeyedValue");
  }

  virtual void Generate();

 private:
  Register value_;
  Register key_;
  Register receiver_;
};


void DeferredReferenceSetKeyedValue::Generate() {
  Register scratch1 = VirtualFrame::scratch0();
  Register scratch2 = VirtualFrame::scratch1();
  __ DecrementCounter(&Counters::keyed_store_inline, 1, scratch1, scratch2);
  __ IncrementCounter(
      &Counters::keyed_store_inline_miss, 1, scratch1, scratch2);

  // Ensure value in r0, key in r1 and receiver in r2 to match keyed store ic
  // calling convention.
  if (value_.is(r1)) {
    __ Swap(r0, r1, ip);
  }
  ASSERT(receiver_.is(r2));

  // The rest of the instructions in the deferred code must be together.
  { Assembler::BlockConstPoolScope block_const_pool(masm_);
    // Call keyed store IC. It has the arguments value, key and receiver in
    // r0, r1 and r2.
    Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
    __ Call(ic, RelocInfo::CODE_TARGET);
    // The call must be followed by a nop instruction to indicate that the
    // keyed store has been inlined.
    __ nop(PROPERTY_ACCESS_INLINED);

    // Block the constant pool for one more instruction after leaving this
    // constant pool block scope to include the branch instruction ending the
    // deferred code.
    __ BlockConstPoolFor(1);
  }
}
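
// Unlike the deferred loads above, this deferred store takes no virtual
// frame snapshot: EmitKeyedStore switches to a spilled frame before
// branching here, so r0-r2 can be shuffled freely.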


class DeferredReferenceSetNamedValue: public DeferredCode {
 public:
  DeferredReferenceSetNamedValue(Register value,
                                 Register receiver,
                                 Handle<String> name)
      : value_(value), receiver_(receiver), name_(name) {
    set_comment("[ DeferredReferenceSetNamedValue");
  }

  virtual void Generate();

 private:
  Register value_;
  Register receiver_;
  Handle<String> name_;
};


// Takes value in r0, receiver in r1 and returns the result (the
// value) in r0.
void DeferredReferenceSetNamedValue::Generate() {
  // Record the entry frame and spill.
  VirtualFrame copied_frame(*frame_state()->frame());
  copied_frame.SpillAll();

  // Ensure value in r0, receiver in r1 to match store ic calling
  // convention.
  ASSERT(value_.is(r0) && receiver_.is(r1));
  __ mov(r2, Operand(name_));

  // The rest of the instructions in the deferred code must be together.
  { Assembler::BlockConstPoolScope block_const_pool(masm_);
    // Call the named store IC. It has the arguments value, receiver and
    // name in r0, r1 and r2.
    Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
    __ Call(ic, RelocInfo::CODE_TARGET);
    // The call must be followed by a nop instruction to indicate that the
    // named store has been inlined.
    __ nop(PROPERTY_ACCESS_INLINED);

    // Go back to the frame we entered with. The instructions
    // generated by this merge are skipped over by the inline store
    // patching mechanism when looking for the branch instruction that
    // tells it where the code to patch is.
    copied_frame.MergeTo(frame_state()->frame());

    // Block the constant pool for one more instruction after leaving this
    // constant pool block scope to include the branch instruction ending the
    // deferred code.
    __ BlockConstPoolFor(1);
  }
}


// Consumes the top of stack (the receiver) and pushes the result instead.
void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
  if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) {
    Comment cmnt(masm(), "[ Load from named Property");
    // Set up the name register and call the load IC.
    frame_->CallLoadIC(name,
                       is_contextual
                           ? RelocInfo::CODE_TARGET_CONTEXT
                           : RelocInfo::CODE_TARGET);
    frame_->EmitPush(r0);  // Push answer.
  } else {
    // Inline the in-object property case.
    Comment cmnt(masm(), "[ Inlined named property load");

    // Counter will be decremented in the deferred code. Placed here to avoid
    // having it in the instruction stream below where patching will occur.
    __ IncrementCounter(&Counters::named_load_inline, 1,
                        frame_->scratch0(), frame_->scratch1());

    // The following instructions are the inlined load of an in-object
    // property. Parts of this code are patched, so the exact instructions
    // generated need to be fixed. Therefore the constant pool is blocked
    // while generating this code.

    // Load the receiver from the stack.
    Register receiver = frame_->PopToRegister();

    DeferredReferenceGetNamedValue* deferred =
        new DeferredReferenceGetNamedValue(receiver, name);

#ifdef DEBUG
    int kInlinedNamedLoadInstructions = 7;
    Label check_inlined_codesize;
    masm_->bind(&check_inlined_codesize);
#endif

    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      // Check that the receiver is a heap object.
      __ tst(receiver, Operand(kSmiTagMask));
      deferred->Branch(eq);

      Register scratch = VirtualFrame::scratch0();
      Register scratch2 = VirtualFrame::scratch1();

      // Check the map. The null map used below is patched by the inline cache
      // code. Therefore we can't use a LoadRoot call.
      __ ldr(scratch, FieldMemOperand(receiver, HeapObject::kMapOffset));
      __ mov(scratch2, Operand(Factory::null_value()));
      __ cmp(scratch, scratch2);
      deferred->Branch(ne);

      // Initially use an invalid index. The index will be patched by the
      // inline cache code.
      __ ldr(receiver, MemOperand(receiver, 0));

      // Make sure that the expected number of instructions are generated.
      ASSERT_EQ(kInlinedNamedLoadInstructions,
                masm_->InstructionsGeneratedSince(&check_inlined_codesize));
    }

    deferred->BindExit();
    // At this point the receiver register has the result, either from the
    // deferred code or from the inlined code.
    frame_->EmitPush(receiver);
  }
}
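
// Schematically, the patchable sequence emitted above is the following
// seven instructions (kInlinedNamedLoadInstructions):
//   tst receiver, #kSmiTagMask     ; smi check
//   beq <deferred>
//   ldr scratch, [receiver, #map]  ; load the receiver's map
//   mov scratch2, #null            ; patched to the expected map
//   cmp scratch, scratch2
//   bne <deferred>
//   ldr receiver, [receiver, #0]   ; 0 is patched to the field offset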


void CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
#ifdef DEBUG
  int expected_height = frame()->height() - (is_contextual ? 1 : 2);
#endif

  Result result;
  if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) {
    frame()->CallStoreIC(name, is_contextual);
  } else {
    // Inline the in-object property case.
    JumpTarget slow, done;

    // Get the value and receiver from the stack.
    frame()->PopToR0();
    Register value = r0;
    frame()->PopToR1();
    Register receiver = r1;

    DeferredReferenceSetNamedValue* deferred =
        new DeferredReferenceSetNamedValue(value, receiver, name);

    // Check that the receiver is a heap object.
    __ tst(receiver, Operand(kSmiTagMask));
    deferred->Branch(eq);

    // The following instructions are part of the inlined in-object
    // property store code, which can be patched. Therefore the exact
    // number of instructions generated must be fixed, so the constant
    // pool is blocked while generating this code.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      Register scratch0 = VirtualFrame::scratch0();
      Register scratch1 = VirtualFrame::scratch1();

      // Check the map. Initially use an invalid map to force a
      // failure. The map check will be patched in the runtime system.
      __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));

#ifdef DEBUG
      Label check_inlined_codesize;
      masm_->bind(&check_inlined_codesize);
#endif
      __ mov(scratch0, Operand(Factory::null_value()));
      __ cmp(scratch0, scratch1);
      deferred->Branch(ne);

      int offset = 0;
      __ str(value, MemOperand(receiver, offset));

      // Update the write barrier and record its size. We do not use
      // the RecordWrite macro here because we want the offset
      // addition instruction first to make it easy to patch.
      Label record_write_start, record_write_done;
      __ bind(&record_write_start);
      // Add offset into the object.
      __ add(scratch0, receiver, Operand(offset));
      // Test that the object is not in the new space. We cannot set
      // region marks for new space pages.
      __ InNewSpace(receiver, scratch1, eq, &record_write_done);
      // Record the actual write.
      __ RecordWriteHelper(receiver, scratch0, scratch1);
      __ bind(&record_write_done);
      // Clobber all input registers when running with the debug-code flag
      // turned on to provoke errors.
      if (FLAG_debug_code) {
        __ mov(receiver, Operand(BitCast<int32_t>(kZapValue)));
        __ mov(scratch0, Operand(BitCast<int32_t>(kZapValue)));
        __ mov(scratch1, Operand(BitCast<int32_t>(kZapValue)));
      }
      // Check that this is the first inlined write barrier or that
      // this inlined write barrier has the same size as all the other
      // inlined write barriers.
      ASSERT((inlined_write_barrier_size_ == -1) ||
             (inlined_write_barrier_size_ ==
              masm()->InstructionsGeneratedSince(&record_write_start)));
      inlined_write_barrier_size_ =
          masm()->InstructionsGeneratedSince(&record_write_start);

      // Make sure that the expected number of instructions are generated.
      ASSERT_EQ(GetInlinedNamedStoreInstructionsAfterPatch(),
                masm()->InstructionsGeneratedSince(&check_inlined_codesize));
    }
    deferred->BindExit();
  }
  ASSERT_EQ(expected_height, frame()->height());
}
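
// The write barrier emitted above must always have the same size: the
// ASSERT on inlined_write_barrier_size_ enforces this, and
// GetInlinedNamedStoreInstructionsAfterPatch() relies on it to describe
// every inlined named store with a single instruction count.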


void CodeGenerator::EmitKeyedLoad() {
  if (loop_nesting() == 0) {
    Comment cmnt(masm_, "[ Load from keyed property");
    frame_->CallKeyedLoadIC();
  } else {
    // Inline the keyed load.
    Comment cmnt(masm_, "[ Inlined load from keyed property");

    // Counter will be decremented in the deferred code. Placed here to avoid
    // having it in the instruction stream below where patching will occur.
    __ IncrementCounter(&Counters::keyed_load_inline, 1,
                        frame_->scratch0(), frame_->scratch1());

    // Load the key and receiver from the stack.
    bool key_is_known_smi = frame_->KnownSmiAt(0);
    Register key = frame_->PopToRegister();
    Register receiver = frame_->PopToRegister(key);

    // The deferred code expects key and receiver in registers.
    DeferredReferenceGetKeyedValue* deferred =
        new DeferredReferenceGetKeyedValue(key, receiver);

    // Check that the receiver is a heap object.
    __ tst(receiver, Operand(kSmiTagMask));
    deferred->Branch(eq);

    // The following instructions are part of the inlined keyed-load code,
    // which can be patched. Therefore the exact number of instructions
    // generated needs to be fixed, so the constant pool is blocked while
    // generating this code.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      Register scratch1 = VirtualFrame::scratch0();
      Register scratch2 = VirtualFrame::scratch1();
      // Check the map. The null map used below is patched by the inline cache
      // code.
      __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));

      // Check that the key is a smi.
      if (!key_is_known_smi) {
        __ tst(key, Operand(kSmiTagMask));
        deferred->Branch(ne);
      }

#ifdef DEBUG
      Label check_inlined_codesize;
      masm_->bind(&check_inlined_codesize);
#endif
      __ mov(scratch2, Operand(Factory::null_value()));
      __ cmp(scratch1, scratch2);
      deferred->Branch(ne);

      // Get the elements array from the receiver.
      __ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset));
      __ AssertFastElements(scratch1);

      // Check that the key is within bounds. Use unsigned comparison to
      // handle negative keys.
      __ ldr(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset));
      __ cmp(scratch2, key);
      deferred->Branch(ls);  // Unsigned less equal.

      // Load and check that the result is not the hole (key is a smi).
      __ LoadRoot(scratch2, Heap::kTheHoleValueRootIndex);
      __ add(scratch1,
             scratch1,
             Operand(FixedArray::kHeaderSize - kHeapObjectTag));
      __ ldr(scratch1,
             MemOperand(scratch1, key, LSL,
                        kPointerSizeLog2 - (kSmiTagSize + kSmiShiftSize)));
      __ cmp(scratch1, scratch2);
      deferred->Branch(eq);

      __ mov(r0, scratch1);
      // Make sure that the expected number of instructions are generated.
      ASSERT_EQ(GetInlinedKeyedLoadInstructionsAfterPatch(),
                masm_->InstructionsGeneratedSince(&check_inlined_codesize));
    }

    deferred->BindExit();
  }
}
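
// Notes on the bounds check above: both the key and the fixed array length
// are smis, so they can be compared while still tagged, and the unsigned
// 'ls' branch also rejects negative keys, which look like huge unsigned
// values. The element load then shifts the smi-tagged key left by
// kPointerSizeLog2 - (kSmiTagSize + kSmiShiftSize), turning it into a byte
// offset without untagging it first.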


void CodeGenerator::EmitKeyedStore(StaticType* key_type,
                                   WriteBarrierCharacter wb_info) {
  // Generate inlined version of the keyed store if the code is in a loop
  // and the key is likely to be a smi.
  if (loop_nesting() > 0 && key_type->IsLikelySmi()) {
    // Inline the keyed store.
    Comment cmnt(masm_, "[ Inlined store to keyed property");

    Register scratch1 = VirtualFrame::scratch0();
    Register scratch2 = VirtualFrame::scratch1();
    Register scratch3 = r3;

    // Counter will be decremented in the deferred code. Placed here to avoid
    // having it in the instruction stream below where patching will occur.
    __ IncrementCounter(&Counters::keyed_store_inline, 1,
                        scratch1, scratch2);

    // Load the value, key and receiver from the stack.
    bool value_is_harmless = frame_->KnownSmiAt(0);
    if (wb_info == NEVER_NEWSPACE) value_is_harmless = true;
    bool key_is_smi = frame_->KnownSmiAt(1);
    Register value = frame_->PopToRegister();
    Register key = frame_->PopToRegister(value);
    VirtualFrame::SpilledScope spilled(frame_);
    Register receiver = r2;
    frame_->EmitPop(receiver);

#ifdef DEBUG
    bool we_remembered_the_write_barrier = value_is_harmless;
#endif

    // The deferred code expects value, key and receiver in registers.
    DeferredReferenceSetKeyedValue* deferred =
        new DeferredReferenceSetKeyedValue(value, key, receiver);

    // Check that the value is a smi. As this inlined code does not set the
    // write barrier it is only possible to store smi values.
    if (!value_is_harmless) {
      // If the value is not likely to be a Smi then let's test the fixed
      // array for new space instead. See below.
      if (wb_info == LIKELY_SMI) {
        __ tst(value, Operand(kSmiTagMask));
        deferred->Branch(ne);
#ifdef DEBUG
        we_remembered_the_write_barrier = true;
#endif
      }
    }

    if (!key_is_smi) {
      // Check that the key is a smi.
      __ tst(key, Operand(kSmiTagMask));
      deferred->Branch(ne);
    }

    // Check that the receiver is a heap object.
    __ tst(receiver, Operand(kSmiTagMask));
    deferred->Branch(eq);

    // Check that the receiver is a JSArray.
    __ CompareObjectType(receiver, scratch1, scratch1, JS_ARRAY_TYPE);
    deferred->Branch(ne);

    // Check that the key is within bounds. Both the key and the length of
    // the JSArray are smis. Use unsigned comparison to handle negative keys.
    __ ldr(scratch1, FieldMemOperand(receiver, JSArray::kLengthOffset));
    __ cmp(scratch1, key);
    deferred->Branch(ls);  // Unsigned less equal.

    // Get the elements array from the receiver.
    __ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset));
    if (!value_is_harmless && wb_info != LIKELY_SMI) {
      Label ok;
      __ and_(scratch2, scratch1,
              Operand(ExternalReference::new_space_mask()));
      __ cmp(scratch2, Operand(ExternalReference::new_space_start()));
      __ tst(value, Operand(kSmiTagMask), ne);
      deferred->Branch(ne);
#ifdef DEBUG
      we_remembered_the_write_barrier = true;
#endif
    }
    // Check that the elements array is not a dictionary.
    __ ldr(scratch2, FieldMemOperand(scratch1, JSObject::kMapOffset));
    // The following instructions are part of the inlined keyed-store code,
    // which can be patched. Therefore the exact number of instructions
    // generated needs to be fixed, so the constant pool is blocked while
    // generating this code.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
#ifdef DEBUG
      Label check_inlined_codesize;
      masm_->bind(&check_inlined_codesize);
#endif

      // Read the fixed array map from the constant pool (not from the root
      // array) so that the value can be patched. When debugging, we patch
      // this comparison to always fail so that we will hit the IC call in
      // the deferred code which will allow the debugger to break for fast
      // case stores.
      __ mov(scratch3, Operand(Factory::fixed_array_map()));
      __ cmp(scratch2, scratch3);
      deferred->Branch(ne);

      // Store the value.
      __ add(scratch1, scratch1,
             Operand(FixedArray::kHeaderSize - kHeapObjectTag));
      __ str(value,
             MemOperand(scratch1, key, LSL,
                        kPointerSizeLog2 - (kSmiTagSize + kSmiShiftSize)));

      // Make sure that the expected number of instructions are generated.
      ASSERT_EQ(kInlinedKeyedStoreInstructionsAfterPatch,
                masm_->InstructionsGeneratedSince(&check_inlined_codesize));
    }

    ASSERT(we_remembered_the_write_barrier);

    deferred->BindExit();
  } else {
    frame()->CallKeyedStoreIC();
  }
}
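
// The inline path above never runs the write barrier, so it must only
// complete for stores that cannot create a pointer into new space from an
// old-space object: either the value is a smi, or the elements array is
// itself in new space. The conditionally executed
// 'tst(value, Operand(kSmiTagMask), ne)' encodes this: the smi test only
// runs when the new-space check failed, and everything else falls back to
// the deferred IC call.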


#ifdef DEBUG
bool CodeGenerator::HasValidEntryRegisters() { return true; }
#endif


#undef __
#define __ ACCESS_MASM(masm)

Handle<String> Reference::GetName() {
  ASSERT(type_ == NAMED);
  Property* property = expression_->AsProperty();
  if (property == NULL) {
    // Global variable reference treated as a named property reference.
    VariableProxy* proxy = expression_->AsVariableProxy();
    ASSERT(proxy->AsVariable() != NULL);
    ASSERT(proxy->AsVariable()->is_global());
    return proxy->name();
  } else {
    Literal* raw_name = property->key()->AsLiteral();
    ASSERT(raw_name != NULL);
    return Handle<String>(String::cast(*raw_name->handle()));
  }
}


void Reference::DupIfPersist() {
  if (persist_after_get_) {
    switch (type_) {
      case KEYED:
        cgen_->frame()->Dup2();
        break;
      case NAMED:
        cgen_->frame()->Dup();
        // Fall through.
      case UNLOADED:
      case ILLEGAL:
      case SLOT:
        // Do nothing.
        ;
    }
  } else {
    set_unloaded();
  }
}
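
// A persisting reference is read and then written again (as in compound
// assignment or count operations), so KEYED keeps both receiver and key
// alive with Dup2() while NAMED only needs its receiver duplicated.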


void Reference::GetValue() {
  ASSERT(cgen_->HasValidEntryRegisters());
  ASSERT(!is_illegal());
  ASSERT(!cgen_->has_cc());
  MacroAssembler* masm = cgen_->masm();
  Property* property = expression_->AsProperty();
  if (property != NULL) {
    cgen_->CodeForSourcePosition(property->position());
  }

  switch (type_) {
    case SLOT: {
      Comment cmnt(masm, "[ Load from Slot");
      Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
      ASSERT(slot != NULL);
      DupIfPersist();
      cgen_->LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
      break;
    }

    case NAMED: {
      Variable* var = expression_->AsVariableProxy()->AsVariable();
      bool is_global = var != NULL;
      ASSERT(!is_global || var->is_global());
      Handle<String> name = GetName();
      DupIfPersist();
      cgen_->EmitNamedLoad(name, is_global);
      break;
    }

    case KEYED: {
      ASSERT(property != NULL);
      DupIfPersist();
      cgen_->EmitKeyedLoad();
      cgen_->frame()->EmitPush(r0);
      break;
    }

    default:
      UNREACHABLE();
  }
}
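
// Note the asymmetry above: EmitNamedLoad pushes its result itself, while
// EmitKeyedLoad leaves the value in r0 and the caller pushes it.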


void Reference::SetValue(InitState init_state, WriteBarrierCharacter wb_info) {
  ASSERT(!is_illegal());
  ASSERT(!cgen_->has_cc());
  MacroAssembler* masm = cgen_->masm();
  VirtualFrame* frame = cgen_->frame();
  Property* property = expression_->AsProperty();
  if (property != NULL) {
    cgen_->CodeForSourcePosition(property->position());
  }

  switch (type_) {
    case SLOT: {
      Comment cmnt(masm, "[ Store to Slot");
      Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
      cgen_->StoreToSlot(slot, init_state);
      set_unloaded();
      break;
    }

    case NAMED: {
      Comment cmnt(masm, "[ Store to named Property");
      cgen_->EmitNamedStore(GetName(), false);
      frame->EmitPush(r0);
      set_unloaded();
      break;
    }

    case KEYED: {
      Comment cmnt(masm, "[ Store to keyed Property");
      Property* property = expression_->AsProperty();
      ASSERT(property != NULL);
      cgen_->CodeForSourcePosition(property->position());
      cgen_->EmitKeyedStore(property->key()->type(), wb_info);
      frame->EmitPush(r0);
      set_unloaded();
      break;
    }

    default:
      UNREACHABLE();
  }
}


const char* GenericBinaryOpStub::GetName() {
  if (name_ != NULL) return name_;
  const int len = 100;
  name_ = Bootstrapper::AllocateAutoDeletedArray(len);
  if (name_ == NULL) return "OOM";
  const char* op_name = Token::Name(op_);
  const char* overwrite_name;
  switch (mode_) {
    case NO_OVERWRITE: overwrite_name = "Alloc"; break;
    case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
    case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
    default: overwrite_name = "UnknownOverwrite"; break;
  }

  OS::SNPrintF(Vector<char>(name_, len),
               "GenericBinaryOpStub_%s_%s%s_%s",
               op_name,
               overwrite_name,
               specialized_on_rhs_ ? "_ConstantRhs" : "",
               BinaryOpIC::GetName(runtime_operands_type_));
  return name_;
}
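
// For illustration only: an overwritable-left add specialized on a
// constant right operand might produce a name along the lines of
// "GenericBinaryOpStub_ADD_OverwriteLeft_ConstantRhs_<type>", where the
// final piece comes from BinaryOpIC::GetName(runtime_operands_type_).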


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM