// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_ARM)

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "compiler.h"
#include "debug.h"
#include "ic-inl.h"
#include "jsregexp.h"
#include "jump-target-light-inl.h"
#include "parser.h"
#include "regexp-macro-assembler.h"
#include "regexp-stack.h"
#include "register-allocator-inl.h"
#include "runtime.h"
#include "scopes.h"
#include "virtual-frame-inl.h"
#include "virtual-frame-arm-inl.h"

namespace v8 {
namespace internal {


static void EmitIdenticalObjectComparison(MacroAssembler* masm,
                                          Label* slow,
                                          Condition cc,
                                          bool never_nan_nan);
static void EmitSmiNonsmiComparison(MacroAssembler* masm,
                                    Label* lhs_not_nan,
                                    Label* slow,
                                    bool strict);
static void EmitTwoNonNanDoubleComparison(MacroAssembler* masm, Condition cc);
static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm);
static void MultiplyByKnownInt(MacroAssembler* masm,
                               Register source,
                               Register destination,
                               int known_int);
static bool IsEasyToMultiplyBy(int x);


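// The '__' macro below is shorthand used throughout this file for emitting
// instructions through the code generator's MacroAssembler (masm_).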
#define __ ACCESS_MASM(masm_)

// -------------------------------------------------------------------------
// Platform-specific DeferredCode functions.

void DeferredCode::SaveRegisters() {
  // On ARM you either have a completely spilled frame or you
  // handle it yourself, but at the moment there's no automation
  // of registers and deferred code.
}


void DeferredCode::RestoreRegisters() {
}


// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.

void VirtualFrameRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  frame_state_->frame()->AssertIsSpilled();
}


void VirtualFrameRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
}


void ICRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterInternalFrame();
}


void ICRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveInternalFrame();
}


// -------------------------------------------------------------------------
// CodeGenState implementation.

CodeGenState::CodeGenState(CodeGenerator* owner)
    : owner_(owner),
      previous_(owner->state()) {
  owner->set_state(this);
}


ConditionCodeGenState::ConditionCodeGenState(CodeGenerator* owner,
                                             JumpTarget* true_target,
                                             JumpTarget* false_target)
    : CodeGenState(owner),
      true_target_(true_target),
      false_target_(false_target) {
  owner->set_state(this);
}


TypeInfoCodeGenState::TypeInfoCodeGenState(CodeGenerator* owner,
                                           Slot* slot,
                                           TypeInfo type_info)
    : CodeGenState(owner),
      slot_(slot) {
  owner->set_state(this);
  old_type_info_ = owner->set_type_info(slot, type_info);
}


CodeGenState::~CodeGenState() {
  ASSERT(owner_->state() == this);
  owner_->set_state(previous_);
}


TypeInfoCodeGenState::~TypeInfoCodeGenState() {
  owner()->set_type_info(slot_, old_type_info_);
}

// -------------------------------------------------------------------------
// CodeGenerator implementation

CodeGenerator::CodeGenerator(MacroAssembler* masm)
    : deferred_(8),
      masm_(masm),
      info_(NULL),
      frame_(NULL),
      allocator_(NULL),
      cc_reg_(al),
      state_(NULL),
      loop_nesting_(0),
      type_info_(NULL),
      function_return_is_shadowed_(false) {
}


// Calling conventions:
// fp: caller's frame pointer
// sp: stack pointer
// r1: called JS function
// cp: callee's context

void CodeGenerator::Generate(CompilationInfo* info) {
  // Record the position for debugging purposes.
  CodeForFunctionPosition(info->function());
  Comment cmnt(masm_, "[ function compiled by virtual frame code generator");

  // Initialize state.
  info_ = info;

  int slots = scope()->num_parameters() + scope()->num_stack_slots();
  ScopedVector<TypeInfo> type_info_array(slots);
  type_info_ = &type_info_array;

  ASSERT(allocator_ == NULL);
  RegisterAllocator register_allocator(this);
  allocator_ = &register_allocator;
  ASSERT(frame_ == NULL);
  frame_ = new VirtualFrame();
  cc_reg_ = al;

  // Adjust for function-level loop nesting.
  ASSERT_EQ(0, loop_nesting_);
  loop_nesting_ = info->loop_nesting();

  {
    CodeGenState state(this);

    // Entry:
    // Stack: receiver, arguments
    // lr: return address
    // fp: caller's frame pointer
    // sp: stack pointer
    // r1: called JS function
    // cp: callee's context
    allocator_->Initialize();

#ifdef DEBUG
    if (strlen(FLAG_stop_at) > 0 &&
        info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
      frame_->SpillAll();
      __ stop("stop-at");
    }
#endif

    if (info->mode() == CompilationInfo::PRIMARY) {
      frame_->Enter();
      // tos: code slot

      // Allocate space for locals and initialize them.  This also checks
      // for stack overflow.
      frame_->AllocateStackSlots();

      VirtualFrame::SpilledScope spilled_scope(frame_);
      int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
      if (heap_slots > 0) {
        // Allocate local context.
        // Get outer context and create a new context based on it.
        __ ldr(r0, frame_->Function());
        frame_->EmitPush(r0);
        if (heap_slots <= FastNewContextStub::kMaximumSlots) {
          FastNewContextStub stub(heap_slots);
          frame_->CallStub(&stub, 1);
        } else {
          frame_->CallRuntime(Runtime::kNewContext, 1);
        }

#ifdef DEBUG
        JumpTarget verified_true;
        __ cmp(r0, cp);
        verified_true.Branch(eq);
        __ stop("NewContext: r0 is expected to be the same as cp");
        verified_true.Bind();
#endif
        // Update context local.
        __ str(cp, frame_->Context());
      }

      // TODO(1241774): Improve this code:
      // 1) only needed if we have a context
      // 2) no need to recompute context ptr every single time
      // 3) don't copy parameter operand code from SlotOperand!
      {
        Comment cmnt2(masm_, "[ copy context parameters into .context");
        // Note that iteration order is relevant here!  If we have the same
        // parameter twice (e.g., function (x, y, x)), and that parameter
        // needs to be copied into the context, it must be the last argument
        // passed to the parameter that needs to be copied.  This is a rare
        // case so we don't check for it, instead we rely on the copying
        // order: such a parameter is copied repeatedly into the same
        // context location and thus the last value is what is seen inside
        // the function.
        for (int i = 0; i < scope()->num_parameters(); i++) {
          Variable* par = scope()->parameter(i);
          Slot* slot = par->slot();
          if (slot != NULL && slot->type() == Slot::CONTEXT) {
            ASSERT(!scope()->is_global_scope());  // No params in global scope.
            __ ldr(r1, frame_->ParameterAt(i));
            // Loads r2 with context; used below in RecordWrite.
            __ str(r1, SlotOperand(slot, r2));
            // Load the offset into r3.
            int slot_offset =
                FixedArray::kHeaderSize + slot->index() * kPointerSize;
            __ mov(r3, Operand(slot_offset));
            __ RecordWrite(r2, r3, r1);
          }
        }
      }

      // Store the arguments object.  This must happen after context
      // initialization because the arguments object may be stored in
      // the context.
      if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
        StoreArgumentsObject(true);
      }

      // Initialize ThisFunction reference if present.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        __ mov(ip, Operand(Factory::the_hole_value()));
        frame_->EmitPush(ip);
        StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT);
      }
    } else {
      // When used as the secondary compiler for splitting, r1, cp,
      // fp, and lr have been pushed on the stack.  Adjust the virtual
      // frame to match this state.
      frame_->Adjust(4);

      // Bind all the bailout labels to the beginning of the function.
      List<CompilationInfo::Bailout*>* bailouts = info->bailouts();
      for (int i = 0; i < bailouts->length(); i++) {
        __ bind(bailouts->at(i)->label());
      }
    }

    // Initialize the function return target after the locals are set
    // up, because it needs the expected frame height from the frame.
    function_return_.SetExpectedHeight();
    function_return_is_shadowed_ = false;

    // Generate code to 'execute' declarations and initialize functions
    // (source elements).  In case of an illegal redeclaration we need to
    // handle that instead of processing the declarations.
    if (scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ illegal redeclarations");
      scope()->VisitIllegalRedeclaration(this);
    } else {
      Comment cmnt(masm_, "[ declarations");
      ProcessDeclarations(scope()->declarations());
      // Bail out if a stack-overflow exception occurred when processing
      // declarations.
      if (HasStackOverflow()) return;
    }

    if (FLAG_trace) {
      frame_->CallRuntime(Runtime::kTraceEnter, 0);
      // Ignore the return value.
    }

    // Compile the body of the function in a vanilla state.  Don't
    // bother compiling all the code if the scope has an illegal
    // redeclaration.
    if (!scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ function body");
#ifdef DEBUG
      bool is_builtin = Bootstrapper::IsActive();
      bool should_trace =
          is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
      if (should_trace) {
        frame_->CallRuntime(Runtime::kDebugTrace, 0);
        // Ignore the return value.
      }
#endif
      VisitStatements(info->function()->body());
    }
  }

  // Generate the return sequence if necessary.
  if (has_valid_frame() || function_return_.is_linked()) {
    if (!function_return_.is_linked()) {
      CodeForReturnPosition(info->function());
    }
    // exit
    // r0: result
    // sp: stack pointer
    // fp: frame pointer
    // cp: callee's context
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);

    function_return_.Bind();
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns the parameter as it is.
      frame_->EmitPush(r0);
      frame_->CallRuntime(Runtime::kTraceExit, 1);
    }

#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      // Tear down the frame which will restore the caller's frame pointer and
      // the link register.
      frame_->Exit();

      // Here we use masm_-> instead of the __ macro to prevent the code
      // coverage tool from instrumenting this sequence, as we rely on the
      // code size here.
      int32_t sp_delta = (scope()->num_parameters() + 1) * kPointerSize;
      masm_->add(sp, sp, Operand(sp_delta));
      masm_->Jump(lr);

#ifdef DEBUG
      // Check that the size of the code used for returning matches what is
      // expected by the debugger.  If the sp_delta above cannot be encoded in
      // the add instruction the add will generate two instructions.
      int return_sequence_length =
          masm_->InstructionsGeneratedSince(&check_exit_codesize);
      CHECK(return_sequence_length ==
            Assembler::kJSReturnSequenceInstructions ||
            return_sequence_length ==
            Assembler::kJSReturnSequenceInstructions + 1);
#endif
    }
  }

  // Adjust for function-level loop nesting.
  ASSERT(loop_nesting_ == info->loop_nesting());
  loop_nesting_ = 0;

  // Code generation state must be reset.
  ASSERT(!has_cc());
  ASSERT(state_ == NULL);
  ASSERT(loop_nesting() == 0);
  ASSERT(!function_return_is_shadowed_);
  function_return_.Unuse();
  DeleteFrame();

  // Process any deferred code using the register allocator.
  if (!HasStackOverflow()) {
    ProcessDeferred();
  }

  allocator_ = NULL;
  type_info_ = NULL;
}


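// Maps a parameter or stack-local slot to a dense index (parameters first,
// then locals), matching the per-slot TypeInfo vector set up in Generate()
// above; other slot kinds are not tracked and yield kInvalidSlotNumber.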
int CodeGenerator::NumberOfSlot(Slot* slot) {
  if (slot == NULL) return kInvalidSlotNumber;
  switch (slot->type()) {
    case Slot::PARAMETER:
      return slot->index();
    case Slot::LOCAL:
      return slot->index() + scope()->num_parameters();
    default:
      break;
  }
  return kInvalidSlotNumber;
}


MemOperand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
  // Currently, this assertion will fail if we try to assign to
  // a constant variable that is constant because it is read-only
  // (such as the variable referring to a named function expression).
  // We need to implement assignments to read-only variables.
  // Ideally, we should do this during AST generation (by converting
  // such assignments into expression statements); however, in general
  // we may not be able to make the decision until past AST generation,
  // that is when the entire program is known.
  ASSERT(slot != NULL);
  int index = slot->index();
  switch (slot->type()) {
    case Slot::PARAMETER:
      return frame_->ParameterAt(index);

    case Slot::LOCAL:
      return frame_->LocalAt(index);

    case Slot::CONTEXT: {
      // Follow the context chain if necessary.
      ASSERT(!tmp.is(cp));  // do not overwrite context register
      Register context = cp;
      int chain_length = scope()->ContextChainLength(slot->var()->scope());
      for (int i = 0; i < chain_length; i++) {
        // Load the closure.
        // (All contexts, even 'with' contexts, have a closure,
        // and it is the same for all contexts inside a function.
        // There is no need to go to the function context first.)
        __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
        // Load the function context (which is the incoming, outer context).
        __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
        context = tmp;
      }
      // We may have a 'with' context now.  Get the function context.
      // (In fact this mov may never be needed, since the scope analysis
      // may not permit a direct context access in this case and thus we are
      // always at a function context.  However it is safe to dereference it
      // because the function context of a function context is itself.  Before
      // deleting this mov we should try to create a counter-example first,
      // though...)
      __ ldr(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
      return ContextOperand(tmp, index);
    }

    default:
      UNREACHABLE();
      return MemOperand(r0, 0);
  }
}


MemOperand CodeGenerator::ContextSlotOperandCheckExtensions(
    Slot* slot,
    Register tmp,
    Register tmp2,
    JumpTarget* slow) {
  ASSERT(slot->type() == Slot::CONTEXT);
  Register context = cp;

  for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX));
        __ tst(tmp2, tmp2);
        slow->Branch(ne);
      }
      __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
      __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
      context = tmp;
    }
  }
  // Check that last extension is NULL.
  __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX));
  __ tst(tmp2, tmp2);
  slow->Branch(ne);
  __ ldr(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
  return ContextOperand(tmp, slot->index());
}


// Loads a value on TOS.  If it is a boolean value, the result may have been
// (partially) translated into branches, or it may have set the condition
// code register.  If force_cc is set, the value is forced to set the
// condition code register and no value is pushed.  If the condition code
// register was set, has_cc() is true and cc_reg_ contains the condition to
// test for 'true'.
void CodeGenerator::LoadCondition(Expression* x,
                                  JumpTarget* true_target,
                                  JumpTarget* false_target,
                                  bool force_cc) {
  ASSERT(!has_cc());
  int original_height = frame_->height();

  { ConditionCodeGenState new_state(this, true_target, false_target);
    Visit(x);

    // If we hit a stack overflow, we may not have actually visited
    // the expression.  In that case, we ensure that we have a
    // valid-looking frame state because we will continue to generate
    // code as we unwind the C++ stack.
    //
    // It's possible to have both a stack overflow and a valid frame
    // state (e.g., a subexpression overflowed, visiting it returned
    // with a dummied frame state, and visiting this expression
    // returned with a normal-looking state).
    if (HasStackOverflow() &&
        has_valid_frame() &&
        !has_cc() &&
        frame_->height() == original_height) {
      frame_->SpillAll();
      true_target->Jump();
    }
  }
  if (force_cc && frame_ != NULL && !has_cc()) {
    // Convert the TOS value to a boolean in the condition code register.
    ToBoolean(true_target, false_target);
  }
  ASSERT(!force_cc || !has_valid_frame() || has_cc());
  ASSERT(!has_valid_frame() ||
         (has_cc() && frame_->height() == original_height) ||
         (!has_cc() && frame_->height() == original_height + 1));
}


void CodeGenerator::Load(Expression* expr) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  JumpTarget true_target;
  JumpTarget false_target;
  LoadCondition(expr, &true_target, &false_target, false);

  if (has_cc()) {
    // Convert cc_reg_ into a boolean value.
    VirtualFrame::SpilledScope scope(frame_);
    JumpTarget loaded;
    JumpTarget materialize_true;
    materialize_true.Branch(cc_reg_);
    __ LoadRoot(r0, Heap::kFalseValueRootIndex);
    frame_->EmitPush(r0);
    loaded.Jump();
    materialize_true.Bind();
    __ LoadRoot(r0, Heap::kTrueValueRootIndex);
    frame_->EmitPush(r0);
    loaded.Bind();
    cc_reg_ = al;
  }

  if (true_target.is_linked() || false_target.is_linked()) {
    VirtualFrame::SpilledScope scope(frame_);
    // We have at least one condition value that has been "translated"
    // into a branch, thus it needs to be loaded explicitly.
    JumpTarget loaded;
    if (frame_ != NULL) {
      loaded.Jump();  // Don't lose the current TOS.
    }
    bool both = true_target.is_linked() && false_target.is_linked();
    // Load "true" if necessary.
    if (true_target.is_linked()) {
      true_target.Bind();
      __ LoadRoot(r0, Heap::kTrueValueRootIndex);
      frame_->EmitPush(r0);
    }
    // If both "true" and "false" need to be loaded jump across the code for
    // "false".
    if (both) {
      loaded.Jump();
    }
    // Load "false" if necessary.
    if (false_target.is_linked()) {
      false_target.Bind();
      __ LoadRoot(r0, Heap::kFalseValueRootIndex);
      frame_->EmitPush(r0);
    }
    // A value is loaded on all paths reaching this point.
    loaded.Bind();
  }
  ASSERT(has_valid_frame());
  ASSERT(!has_cc());
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::LoadGlobal() {
  Register reg = frame_->GetTOSRegister();
  __ ldr(reg, GlobalObject());
  frame_->EmitPush(reg);
}


void CodeGenerator::LoadGlobalReceiver(Register scratch) {
  VirtualFrame::SpilledScope spilled_scope(frame_);
  __ ldr(scratch, ContextOperand(cp, Context::GLOBAL_INDEX));
  __ ldr(scratch,
         FieldMemOperand(scratch, GlobalObject::kGlobalReceiverOffset));
  frame_->EmitPush(scratch);
}


ArgumentsAllocationMode CodeGenerator::ArgumentsMode() {
  if (scope()->arguments() == NULL) return NO_ARGUMENTS_ALLOCATION;
  ASSERT(scope()->arguments_shadow() != NULL);
  // We don't want to do lazy arguments allocation for functions that
  // have heap-allocated contexts, because it interferes with the
  // uninitialized const tracking in the context objects.
  return (scope()->num_heap_slots() > 0)
      ? EAGER_ARGUMENTS_ALLOCATION
      : LAZY_ARGUMENTS_ALLOCATION;
}


void CodeGenerator::StoreArgumentsObject(bool initial) {
  VirtualFrame::SpilledScope spilled_scope(frame_);

  ArgumentsAllocationMode mode = ArgumentsMode();
  ASSERT(mode != NO_ARGUMENTS_ALLOCATION);

  Comment cmnt(masm_, "[ store arguments object");
  if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) {
    // When using lazy arguments allocation, we store the hole value
    // as a sentinel indicating that the arguments object hasn't been
    // allocated yet.
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    frame_->EmitPush(ip);
  } else {
    ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
    __ ldr(r2, frame_->Function());
    // The receiver is below the arguments, the return address, and the
    // frame pointer on the stack.
    const int kReceiverDisplacement = 2 + scope()->num_parameters();
    __ add(r1, fp, Operand(kReceiverDisplacement * kPointerSize));
    __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));
    frame_->Adjust(3);
    __ Push(r2, r1, r0);
    frame_->CallStub(&stub, 3);
    frame_->EmitPush(r0);
  }

  Variable* arguments = scope()->arguments()->var();
  Variable* shadow = scope()->arguments_shadow()->var();
  ASSERT(arguments != NULL && arguments->slot() != NULL);
  ASSERT(shadow != NULL && shadow->slot() != NULL);
  JumpTarget done;
  if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) {
    // We have to skip storing into the arguments slot if it has
    // already been written to.  This can happen if a function
    // has a local variable named 'arguments'.
    LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF);
    frame_->EmitPop(r0);
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(r0, ip);
    done.Branch(ne);
  }
  StoreToSlot(arguments->slot(), NOT_CONST_INIT);
  if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind();
  StoreToSlot(shadow->slot(), NOT_CONST_INIT);
}


void CodeGenerator::LoadTypeofExpression(Expression* expr) {
  // Special handling of identifiers as subexpressions of typeof.
  Variable* variable = expr->AsVariableProxy()->AsVariable();
  if (variable != NULL && !variable->is_this() && variable->is_global()) {
    // For a global variable we build the property reference
    // <global>.<variable> and perform a (regular non-contextual) property
    // load to make sure we do not get reference errors.
    Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX);
    Literal key(variable->name());
    Property property(&global, &key, RelocInfo::kNoPosition);
    Reference ref(this, &property);
    ref.GetValue();
  } else if (variable != NULL && variable->slot() != NULL) {
    // For a variable that rewrites to a slot, we signal it is the immediate
    // subexpression of a typeof.
    LoadFromSlotCheckForArguments(variable->slot(), INSIDE_TYPEOF);
  } else {
    // Anything else can be handled normally.
    Load(expr);
  }
}


Reference::Reference(CodeGenerator* cgen,
                     Expression* expression,
                     bool persist_after_get)
    : cgen_(cgen),
      expression_(expression),
      type_(ILLEGAL),
      persist_after_get_(persist_after_get) {
  cgen->LoadReference(this);
}


Reference::~Reference() {
  ASSERT(is_unloaded() || is_illegal());
}


void CodeGenerator::LoadReference(Reference* ref) {
  Comment cmnt(masm_, "[ LoadReference");
  Expression* e = ref->expression();
  Property* property = e->AsProperty();
  Variable* var = e->AsVariableProxy()->AsVariable();

  if (property != NULL) {
    // The expression is either a property or a variable proxy that rewrites
    // to a property.
    Load(property->obj());
    if (property->key()->IsPropertyName()) {
      ref->set_type(Reference::NAMED);
    } else {
      Load(property->key());
      ref->set_type(Reference::KEYED);
    }
  } else if (var != NULL) {
    // The expression is a variable proxy that does not rewrite to a
    // property.  Global variables are treated as named property references.
    if (var->is_global()) {
      LoadGlobal();
      ref->set_type(Reference::NAMED);
    } else {
      ASSERT(var->slot() != NULL);
      ref->set_type(Reference::SLOT);
    }
  } else {
    // Anything else is a runtime error.
    Load(e);
    frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
  }
}


void CodeGenerator::UnloadReference(Reference* ref) {
  int size = ref->size();
  ref->set_unloaded();
  if (size == 0) return;

  // Pop a reference from the stack while preserving TOS.
  VirtualFrame::RegisterAllocationScope scope(this);
  Comment cmnt(masm_, "[ UnloadReference");
  if (size > 0) {
    Register tos = frame_->PopToRegister();
    frame_->Drop(size);
    frame_->EmitPush(tos);
  }
}


// ECMA-262, section 9.2, page 30: ToBoolean().  Convert the given
// register to a boolean in the condition code register.  The code
// may jump to 'false_target' in case the register converts to 'false'.
void CodeGenerator::ToBoolean(JumpTarget* true_target,
                              JumpTarget* false_target) {
  VirtualFrame::SpilledScope spilled_scope(frame_);
  // Note: The generated code snippet does not change stack variables.
  //       Only the condition code should be set.
  frame_->EmitPop(r0);

  // Fast case checks

  // Check if the value is 'false'.
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ cmp(r0, ip);
  false_target->Branch(eq);

  // Check if the value is 'true'.
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ cmp(r0, ip);
  true_target->Branch(eq);

  // Check if the value is 'undefined'.
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, ip);
  false_target->Branch(eq);

  // Check if the value is a smi.
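  // (The smi zero is the only smi that converts to false; any other smi
  // falls through the comparison below and takes the true branch.)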
  __ cmp(r0, Operand(Smi::FromInt(0)));
  false_target->Branch(eq);
  __ tst(r0, Operand(kSmiTagMask));
  true_target->Branch(eq);

  // Slow case: call the runtime.
  frame_->EmitPush(r0);
  frame_->CallRuntime(Runtime::kToBool, 1);
  // Convert the result (r0) to a condition code.
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ cmp(r0, ip);

  cc_reg_ = ne;
}


void CodeGenerator::GenericBinaryOperation(Token::Value op,
                                           OverwriteMode overwrite_mode,
                                           GenerateInlineSmi inline_smi,
                                           int constant_rhs) {
  // top of virtual frame: y
  // 2nd elt. on virtual frame : x
  // result : top of virtual frame

  // Stub is entered with a call: 'return address' is in lr.
  switch (op) {
    case Token::ADD:
    case Token::SUB:
      if (inline_smi) {
        JumpTarget done;
        Register rhs = frame_->PopToRegister();
        Register lhs = frame_->PopToRegister(rhs);
        Register scratch = VirtualFrame::scratch0();
        __ orr(scratch, rhs, Operand(lhs));
        // Check they are both small and positive.
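        // (Since kSmiTag == 0, the OR of the two operands has its low bit
        // clear only if both are smis, and its sign bit clear only if both
        // are non-negative, so the single tst below covers both checks.)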
        __ tst(scratch, Operand(kSmiTagMask | 0xc0000000));
        ASSERT(rhs.is(r0) || lhs.is(r0));  // r0 is free now.
        ASSERT_EQ(0, kSmiTag);
        if (op == Token::ADD) {
          __ add(r0, lhs, Operand(rhs), LeaveCC, eq);
        } else {
          __ sub(r0, lhs, Operand(rhs), LeaveCC, eq);
        }
        done.Branch(eq);
        GenericBinaryOpStub stub(op, overwrite_mode, lhs, rhs, constant_rhs);
        frame_->SpillAll();
        frame_->CallStub(&stub, 0);
        done.Bind();
        frame_->EmitPush(r0);
        break;
      } else {
        // Fall through!
      }
    case Token::BIT_OR:
    case Token::BIT_AND:
    case Token::BIT_XOR:
      if (inline_smi) {
        bool rhs_is_smi = frame_->KnownSmiAt(0);
        bool lhs_is_smi = frame_->KnownSmiAt(1);
        Register rhs = frame_->PopToRegister();
        Register lhs = frame_->PopToRegister(rhs);
        Register smi_test_reg;
        Condition cond;
        if (!rhs_is_smi || !lhs_is_smi) {
          if (rhs_is_smi) {
            smi_test_reg = lhs;
          } else if (lhs_is_smi) {
            smi_test_reg = rhs;
          } else {
            smi_test_reg = VirtualFrame::scratch0();
            __ orr(smi_test_reg, rhs, Operand(lhs));
          }
          // Check they are both Smis.
          __ tst(smi_test_reg, Operand(kSmiTagMask));
          cond = eq;
        } else {
          cond = al;
        }
        ASSERT(rhs.is(r0) || lhs.is(r0));  // r0 is free now.
        if (op == Token::BIT_OR) {
          __ orr(r0, lhs, Operand(rhs), LeaveCC, cond);
        } else if (op == Token::BIT_AND) {
          __ and_(r0, lhs, Operand(rhs), LeaveCC, cond);
        } else {
          ASSERT(op == Token::BIT_XOR);
          ASSERT_EQ(0, kSmiTag);
          __ eor(r0, lhs, Operand(rhs), LeaveCC, cond);
        }
        if (cond != al) {
          JumpTarget done;
          done.Branch(cond);
          GenericBinaryOpStub stub(op, overwrite_mode, lhs, rhs, constant_rhs);
          frame_->SpillAll();
          frame_->CallStub(&stub, 0);
          done.Bind();
        }
        frame_->EmitPush(r0);
        break;
      } else {
        // Fall through!
      }
    case Token::MUL:
    case Token::DIV:
    case Token::MOD:
    case Token::SHL:
    case Token::SHR:
    case Token::SAR: {
      Register rhs = frame_->PopToRegister();
      Register lhs = frame_->PopToRegister(rhs);  // Don't pop to rhs register.
      GenericBinaryOpStub stub(op, overwrite_mode, lhs, rhs, constant_rhs);
      frame_->SpillAll();
      frame_->CallStub(&stub, 0);
      frame_->EmitPush(r0);
      break;
    }

    case Token::COMMA: {
      Register scratch = frame_->PopToRegister();
      // Simply discard left value.
      frame_->Drop();
      frame_->EmitPush(scratch);
      break;
    }

    default:
      // Other cases should have been handled before this point.
      UNREACHABLE();
      break;
  }
}


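// Deferred (slow-case) code for an inlined smi operation with a known
// constant operand.  Generate() below undoes any optimistic arithmetic the
// inlined code performed, reloads the constant into the register the stub
// expects, falls back to GenericBinaryOpStub, and finally moves the result
// back into the register the inlined code expects it in.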
class DeferredInlineSmiOperation: public DeferredCode {
 public:
  DeferredInlineSmiOperation(Token::Value op,
                             int value,
                             bool reversed,
                             OverwriteMode overwrite_mode,
                             Register tos)
      : op_(op),
        value_(value),
        reversed_(reversed),
        overwrite_mode_(overwrite_mode),
        tos_register_(tos) {
    set_comment("[ DeferredInlinedSmiOperation");
  }

  virtual void Generate();

 private:
  Token::Value op_;
  int value_;
  bool reversed_;
  OverwriteMode overwrite_mode_;
  Register tos_register_;
};


void DeferredInlineSmiOperation::Generate() {
  Register lhs = r1;
  Register rhs = r0;
  switch (op_) {
    case Token::ADD: {
      // Revert optimistic add.
      if (reversed_) {
        __ sub(r0, tos_register_, Operand(Smi::FromInt(value_)));
        __ mov(r1, Operand(Smi::FromInt(value_)));
      } else {
        __ sub(r1, tos_register_, Operand(Smi::FromInt(value_)));
        __ mov(r0, Operand(Smi::FromInt(value_)));
      }
      break;
    }

    case Token::SUB: {
      // Revert optimistic sub.
      if (reversed_) {
        __ rsb(r0, tos_register_, Operand(Smi::FromInt(value_)));
        __ mov(r1, Operand(Smi::FromInt(value_)));
      } else {
        __ add(r1, tos_register_, Operand(Smi::FromInt(value_)));
        __ mov(r0, Operand(Smi::FromInt(value_)));
      }
      break;
    }

    // For these operations there is no optimistic operation that needs to be
    // reverted.
    case Token::MUL:
    case Token::MOD:
    case Token::BIT_OR:
    case Token::BIT_XOR:
    case Token::BIT_AND: {
      if (reversed_) {
        if (tos_register_.is(r0)) {
          __ mov(r1, Operand(Smi::FromInt(value_)));
        } else {
          ASSERT(tos_register_.is(r1));
          __ mov(r0, Operand(Smi::FromInt(value_)));
          lhs = r0;
          rhs = r1;
        }
      } else {
        if (tos_register_.is(r1)) {
          __ mov(r0, Operand(Smi::FromInt(value_)));
        } else {
          ASSERT(tos_register_.is(r0));
          __ mov(r1, Operand(Smi::FromInt(value_)));
          lhs = r0;
          rhs = r1;
        }
      }
      break;
    }

    case Token::SHL:
    case Token::SHR:
    case Token::SAR: {
      if (!reversed_) {
        if (tos_register_.is(r1)) {
          __ mov(r0, Operand(Smi::FromInt(value_)));
        } else {
          ASSERT(tos_register_.is(r0));
          __ mov(r1, Operand(Smi::FromInt(value_)));
          lhs = r0;
          rhs = r1;
        }
      } else {
        ASSERT(op_ == Token::SHL);
        __ mov(r1, Operand(Smi::FromInt(value_)));
      }
      break;
    }

    default:
      // Other cases should have been handled before this point.
      UNREACHABLE();
      break;
  }

  GenericBinaryOpStub stub(op_, overwrite_mode_, lhs, rhs, value_);
  __ CallStub(&stub);
  // The generic stub returns its value in r0, but that's not
  // necessarily what we want.  We want whatever the inlined code
  // expected, which is that the answer is in the same register as
  // the operand was.
  __ Move(tos_register_, r0);
}


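// Returns true if x has at most two bits set, e.g. for 0x88 but not for 0x89.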
static bool PopCountLessThanEqual2(unsigned int x) {
  x &= x - 1;
  return (x & (x - 1)) == 0;
}


// Returns the index of the lowest bit set.
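// The argument must be nonzero; for example BitPosition(0x28) == 3.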
static int BitPosition(unsigned x) {
  int bit_posn = 0;
  while ((x & 0xf) == 0) {
    bit_posn += 4;
    x >>= 4;
  }
  while ((x & 1) == 0) {
    bit_posn++;
    x >>= 1;
  }
  return bit_posn;
}


void CodeGenerator::SmiOperation(Token::Value op,
                                 Handle<Object> value,
                                 bool reversed,
                                 OverwriteMode mode) {
  int int_value = Smi::cast(*value)->value();

  bool both_sides_are_smi = frame_->KnownSmiAt(0);

  bool something_to_inline;
  switch (op) {
    case Token::ADD:
    case Token::SUB:
    case Token::BIT_AND:
    case Token::BIT_OR:
    case Token::BIT_XOR: {
      something_to_inline = true;
      break;
    }
    case Token::SHL: {
      something_to_inline = (both_sides_are_smi || !reversed);
      break;
    }
    case Token::SHR:
    case Token::SAR: {
      if (reversed) {
        something_to_inline = false;
      } else {
        something_to_inline = true;
      }
      break;
    }
    case Token::MOD: {
      if (reversed || int_value < 2 || !IsPowerOf2(int_value)) {
        something_to_inline = false;
      } else {
        something_to_inline = true;
      }
      break;
    }
    case Token::MUL: {
      if (!IsEasyToMultiplyBy(int_value)) {
        something_to_inline = false;
      } else {
        something_to_inline = true;
      }
      break;
    }
    default: {
      something_to_inline = false;
      break;
    }
  }

  if (!something_to_inline) {
    if (!reversed) {
      // Push the rhs onto the virtual frame by putting it in a TOS register.
      Register rhs = frame_->GetTOSRegister();
      __ mov(rhs, Operand(value));
      frame_->EmitPush(rhs, TypeInfo::Smi());
      GenericBinaryOperation(op, mode, GENERATE_INLINE_SMI, int_value);
    } else {
      // Pop the rhs, then push lhs and rhs in the right order.  Only performs
      // at most one pop, the rest takes place in TOS registers.
      Register lhs = frame_->GetTOSRegister();    // Get reg for pushing.
      Register rhs = frame_->PopToRegister(lhs);  // Don't use lhs for this.
      __ mov(lhs, Operand(value));
      frame_->EmitPush(lhs, TypeInfo::Smi());
      TypeInfo t = both_sides_are_smi ? TypeInfo::Smi() : TypeInfo::Unknown();
      frame_->EmitPush(rhs, t);
      GenericBinaryOperation(op, mode, GENERATE_INLINE_SMI, kUnknownIntValue);
    }
    return;
  }

  // We move the top of stack to a register (normally no move is involved).
  Register tos = frame_->PopToRegister();
  // All other registers are spilled.  The deferred code expects one argument
  // in a register and all other values are flushed to the stack.  The
  // answer is returned in the same register that the top of stack argument was
  // in.
  frame_->SpillAll();

  switch (op) {
    case Token::ADD: {
      DeferredCode* deferred =
          new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);

      __ add(tos, tos, Operand(value), SetCC);
      deferred->Branch(vs);
      if (!both_sides_are_smi) {
        __ tst(tos, Operand(kSmiTagMask));
        deferred->Branch(ne);
      }
      deferred->BindExit();
      frame_->EmitPush(tos);
      break;
    }

    case Token::SUB: {
      DeferredCode* deferred =
          new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);

      if (reversed) {
        __ rsb(tos, tos, Operand(value), SetCC);
      } else {
        __ sub(tos, tos, Operand(value), SetCC);
      }
      deferred->Branch(vs);
      if (!both_sides_are_smi) {
        __ tst(tos, Operand(kSmiTagMask));
        deferred->Branch(ne);
      }
      deferred->BindExit();
      frame_->EmitPush(tos);
      break;
    }


    case Token::BIT_OR:
    case Token::BIT_XOR:
    case Token::BIT_AND: {
      if (both_sides_are_smi) {
        switch (op) {
          case Token::BIT_OR:  __ orr(tos, tos, Operand(value)); break;
          case Token::BIT_XOR: __ eor(tos, tos, Operand(value)); break;
          case Token::BIT_AND: __ and_(tos, tos, Operand(value)); break;
          default: UNREACHABLE();
        }
        frame_->EmitPush(tos, TypeInfo::Smi());
      } else {
        DeferredCode* deferred =
            new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
        __ tst(tos, Operand(kSmiTagMask));
        deferred->Branch(ne);
        switch (op) {
          case Token::BIT_OR:  __ orr(tos, tos, Operand(value)); break;
          case Token::BIT_XOR: __ eor(tos, tos, Operand(value)); break;
          case Token::BIT_AND: __ and_(tos, tos, Operand(value)); break;
          default: UNREACHABLE();
        }
        deferred->BindExit();
        TypeInfo result_type =
            (op == Token::BIT_AND) ? TypeInfo::Smi() : TypeInfo::Integer32();
        frame_->EmitPush(tos, result_type);
      }
      break;
    }

    case Token::SHL:
      if (reversed) {
        ASSERT(both_sides_are_smi);
        int max_shift = 0;
        int max_result = int_value == 0 ? 1 : int_value;
        while (Smi::IsValid(max_result << 1)) {
          max_shift++;
          max_result <<= 1;
        }
        DeferredCode* deferred =
            new DeferredInlineSmiOperation(op, int_value, true, mode, tos);
        // Mask off the last 5 bits of the shift operand (rhs).  This is part
        // of the definition of shift in JS and we know we have a Smi so we
        // can safely do this.  The masked version gets passed to the
        // deferred code, but that makes no difference.
        __ and_(tos, tos, Operand(Smi::FromInt(0x1f)));
        __ cmp(tos, Operand(Smi::FromInt(max_shift)));
        deferred->Branch(ge);
        Register scratch = VirtualFrame::scratch0();
        __ mov(scratch, Operand(tos, ASR, kSmiTagSize));  // Untag.
        __ mov(tos, Operand(Smi::FromInt(int_value)));    // Load constant.
        __ mov(tos, Operand(tos, LSL, scratch));          // Shift constant.
        deferred->BindExit();
        TypeInfo result = TypeInfo::Integer32();
        frame_->EmitPush(tos, result);
        break;
      }
      // Fall through!
    case Token::SHR:
    case Token::SAR: {
      ASSERT(!reversed);
      TypeInfo result = TypeInfo::Integer32();
      Register scratch = VirtualFrame::scratch0();
      Register scratch2 = VirtualFrame::scratch1();
      int shift_value = int_value & 0x1f;  // least significant 5 bits
      DeferredCode* deferred =
          new DeferredInlineSmiOperation(op, shift_value, false, mode, tos);
      uint32_t problematic_mask = kSmiTagMask;
      // For unsigned shift by zero all negative smis are problematic.
      bool skip_smi_test = both_sides_are_smi;
      if (shift_value == 0 && op == Token::SHR) {
        problematic_mask |= 0x80000000;
        skip_smi_test = false;
      }
      if (!skip_smi_test) {
        __ tst(tos, Operand(problematic_mask));
        deferred->Branch(ne);  // Go slow for problematic input.
      }
      switch (op) {
        case Token::SHL: {
          if (shift_value != 0) {
            int adjusted_shift = shift_value - kSmiTagSize;
            ASSERT(adjusted_shift >= 0);
            if (adjusted_shift != 0) {
              __ mov(scratch, Operand(tos, LSL, adjusted_shift));
              // Check that the *signed* result fits in a smi.
              __ add(scratch2, scratch, Operand(0x40000000), SetCC);
              deferred->Branch(mi);
              __ mov(tos, Operand(scratch, LSL, kSmiTagSize));
            } else {
              // Check that the *signed* result fits in a smi.
              __ add(scratch2, tos, Operand(0x40000000), SetCC);
              deferred->Branch(mi);
              __ mov(tos, Operand(tos, LSL, kSmiTagSize));
            }
          }
          break;
        }
        case Token::SHR: {
          if (shift_value != 0) {
            __ mov(scratch, Operand(tos, ASR, kSmiTagSize));  // Remove tag.
            // LSR by immediate 0 means shifting 32 bits.
            __ mov(scratch, Operand(scratch, LSR, shift_value));
            if (shift_value == 1) {
              // Check that the *unsigned* result fits in a smi: neither of
              // the two high-order bits can be set.
              // - 0x80000000: the high bit would be lost when smi tagging.
              // - 0x40000000: this number would convert to negative when
              //   smi tagging.
              // These two cases can only happen with shifts by 0 or 1 when
              // handed a valid smi.
              __ tst(scratch, Operand(0xc0000000));
              deferred->Branch(ne);
            } else {
              ASSERT(shift_value >= 2);
              result = TypeInfo::Smi();  // SHR by at least 2 gives a Smi.
            }
            __ mov(tos, Operand(scratch, LSL, kSmiTagSize));
          }
          break;
        }
        case Token::SAR: {
          // In the ARM instruction set, ASR by immediate 0 means shifting 32
          // bits.
          if (shift_value != 0) {
            // Do the shift and the tag removal in one operation.  If the shift
            // is 31 bits (the highest possible value) then we emit the
            // instruction as a shift by 0 which means shift arithmetically by
            // 32.
            __ mov(tos, Operand(tos, ASR, (kSmiTagSize + shift_value) & 0x1f));
            // Put tag back.
            __ mov(tos, Operand(tos, LSL, kSmiTagSize));
            // SAR by at least 1 gives a Smi.
            result = TypeInfo::Smi();
          }
          break;
        }
        default: UNREACHABLE();
      }
      deferred->BindExit();
      frame_->EmitPush(tos, result);
      break;
    }

    case Token::MOD: {
      ASSERT(!reversed);
      ASSERT(int_value >= 2);
      ASSERT(IsPowerOf2(int_value));
      DeferredCode* deferred =
          new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
      unsigned mask = (0x80000000u | kSmiTagMask);
      __ tst(tos, Operand(mask));
      deferred->Branch(ne);  // Go to deferred code on non-Smis and negative.
      mask = (int_value << kSmiTagSize) - 1;
      __ and_(tos, tos, Operand(mask));
      deferred->BindExit();
      // Mod of positive power of 2 Smi gives a Smi if the lhs is an integer.
      frame_->EmitPush(
          tos,
          both_sides_are_smi ? TypeInfo::Smi() : TypeInfo::Number());
      break;
    }

    case Token::MUL: {
      ASSERT(IsEasyToMultiplyBy(int_value));
      DeferredCode* deferred =
          new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
      unsigned max_smi_that_wont_overflow = Smi::kMaxValue / int_value;
      max_smi_that_wont_overflow <<= kSmiTagSize;
      unsigned mask = 0x80000000u;
      while ((mask & max_smi_that_wont_overflow) == 0) {
        mask |= mask >> 1;
      }
      mask |= kSmiTagMask;
      // This does a single mask that checks for a too high value in a
      // conservative way and for a non-Smi.  It also filters out negative
      // numbers, unfortunately, but since this code is inline we prefer
      // brevity to comprehensiveness.
      __ tst(tos, Operand(mask));
      deferred->Branch(ne);
      MultiplyByKnownInt(masm_, tos, tos, int_value);
      deferred->BindExit();
      frame_->EmitPush(tos);
      break;
    }

    default:
      UNREACHABLE();
      break;
  }
}


1391void CodeGenerator::Comparison(Condition cc,
1392 Expression* left,
1393 Expression* right,
1394 bool strict) {
Steve Block6ded16b2010-05-10 14:33:55 +01001395 VirtualFrame::RegisterAllocationScope scope(this);
Steve Blocka7e24c12009-10-30 11:49:00 +00001396
Steve Block6ded16b2010-05-10 14:33:55 +01001397 if (left != NULL) Load(left);
1398 if (right != NULL) Load(right);
1399
Steve Blocka7e24c12009-10-30 11:49:00 +00001400 // sp[0] : y
1401 // sp[1] : x
1402 // result : cc register
1403
1404 // Strict only makes sense for equality comparisons.
1405 ASSERT(!strict || cc == eq);
1406
Steve Block6ded16b2010-05-10 14:33:55 +01001407 Register lhs;
1408 Register rhs;
1409
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001410 bool lhs_is_smi;
1411 bool rhs_is_smi;
1412
Steve Block6ded16b2010-05-10 14:33:55 +01001413 // We load the top two stack positions into registers chosen by the virtual
1414 // frame. This should keep the register shuffling to a minimum.
Steve Blocka7e24c12009-10-30 11:49:00 +00001415 // Implement '>' and '<=' by reversal to obtain ECMA-262 conversion order.
1416 if (cc == gt || cc == le) {
1417 cc = ReverseCondition(cc);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001418 lhs_is_smi = frame_->KnownSmiAt(0);
1419 rhs_is_smi = frame_->KnownSmiAt(1);
Steve Block6ded16b2010-05-10 14:33:55 +01001420 lhs = frame_->PopToRegister();
1421 rhs = frame_->PopToRegister(lhs); // Don't pop to the same register again!
Steve Blocka7e24c12009-10-30 11:49:00 +00001422 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001423 rhs_is_smi = frame_->KnownSmiAt(0);
1424 lhs_is_smi = frame_->KnownSmiAt(1);
Steve Block6ded16b2010-05-10 14:33:55 +01001425 rhs = frame_->PopToRegister();
1426 lhs = frame_->PopToRegister(rhs); // Don't pop to the same register again!
Steve Blocka7e24c12009-10-30 11:49:00 +00001427 }
Steve Block6ded16b2010-05-10 14:33:55 +01001428
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001429 bool both_sides_are_smi = (lhs_is_smi && rhs_is_smi);
1430
Steve Block6ded16b2010-05-10 14:33:55 +01001431 ASSERT(rhs.is(r0) || rhs.is(r1));
1432 ASSERT(lhs.is(r0) || lhs.is(r1));
1433
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001434 JumpTarget exit;
Steve Blocka7e24c12009-10-30 11:49:00 +00001435
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001436 if (!both_sides_are_smi) {
1437 // Now we have the two sides in r0 and r1. We flush any other registers
1438 // because the stub doesn't know about register allocation.
1439 frame_->SpillAll();
1440 Register scratch = VirtualFrame::scratch0();
1441 Register smi_test_reg;
1442 if (lhs_is_smi) {
1443 smi_test_reg = rhs;
1444 } else if (rhs_is_smi) {
1445 smi_test_reg = lhs;
1446 } else {
1447 __ orr(scratch, lhs, Operand(rhs));
1448 smi_test_reg = scratch;
1449 }
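    // Or-ing the two values combines their tag bits, so a single test
    // against kSmiTagMask catches a non-smi on either side.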
1450 __ tst(smi_test_reg, Operand(kSmiTagMask));
1451 JumpTarget smi;
1452 smi.Branch(eq);
1453
1454 // Perform non-smi comparison by stub.
1455 // CompareStub takes arguments in r0 and r1, returns <0, >0 or 0 in r0.
1456 // We call with 0 args because there are 0 on the stack.
1457 if (!rhs.is(r0)) {
1458 __ Swap(rhs, lhs, ip);
1459 }
1460
1461 CompareStub stub(cc, strict);
1462 frame_->CallStub(&stub, 0);
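    // Set the flags from the stub result so that the condition code
    // recorded at the exit label below is valid for this path as well as
    // for the inline smi comparison.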
1463 __ cmp(r0, Operand(0));
1464 exit.Jump();
1465
1466 smi.Bind();
Steve Block6ded16b2010-05-10 14:33:55 +01001467 }
1468
Steve Blocka7e24c12009-10-30 11:49:00 +00001469 // Do smi comparisons by pointer comparison.
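  // Smi tagging is a left shift with a zero tag, so comparing the tagged
  // words gives the same result as comparing the untagged integers.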
Steve Block6ded16b2010-05-10 14:33:55 +01001470 __ cmp(lhs, Operand(rhs));
Steve Blocka7e24c12009-10-30 11:49:00 +00001471
1472 exit.Bind();
1473 cc_reg_ = cc;
1474}
1475
1476
Steve Blocka7e24c12009-10-30 11:49:00 +00001477// Call the function on the stack with the given arguments.
1478void CodeGenerator::CallWithArguments(ZoneList<Expression*>* args,
Leon Clarkee46be812010-01-19 14:06:41 +00001479 CallFunctionFlags flags,
1480 int position) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001481 frame_->AssertIsSpilled();
1482
Steve Blocka7e24c12009-10-30 11:49:00 +00001483 // Push the arguments ("left-to-right") on the stack.
1484 int arg_count = args->length();
1485 for (int i = 0; i < arg_count; i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001486 Load(args->at(i));
Steve Blocka7e24c12009-10-30 11:49:00 +00001487 }
1488
1489 // Record the position for debugging purposes.
1490 CodeForSourcePosition(position);
1491
1492 // Use the shared code stub to call the function.
1493 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Leon Clarkee46be812010-01-19 14:06:41 +00001494 CallFunctionStub call_function(arg_count, in_loop, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00001495 frame_->CallStub(&call_function, arg_count + 1);
1496
1497 // Restore context and pop function from the stack.
1498 __ ldr(cp, frame_->Context());
1499 frame_->Drop(); // discard the TOS
1500}
1501
1502
Steve Block6ded16b2010-05-10 14:33:55 +01001503void CodeGenerator::CallApplyLazy(Expression* applicand,
1504 Expression* receiver,
1505 VariableProxy* arguments,
1506 int position) {
1507 // An optimized implementation of expressions of the form
1508 // x.apply(y, arguments).
1509 // If the arguments object of the scope has not been allocated,
1510 // and x.apply is Function.prototype.apply, this optimization
1511 // just copies y and the arguments of the current function on the
1512 // stack, as receiver and arguments, and calls x.
1513 // In the implementation comments, we call x the applicand
1514 // and y the receiver.
1515 VirtualFrame::SpilledScope spilled_scope(frame_);
1516
1517 ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
1518 ASSERT(arguments->IsArguments());
1519
1520 // Load applicand.apply onto the stack. This will usually
1521 // give us a megamorphic load site. Not super, but it works.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001522 Load(applicand);
Steve Block6ded16b2010-05-10 14:33:55 +01001523 Handle<String> name = Factory::LookupAsciiSymbol("apply");
Leon Clarkef7060e22010-06-03 12:02:55 +01001524 frame_->Dup();
Steve Block6ded16b2010-05-10 14:33:55 +01001525 frame_->CallLoadIC(name, RelocInfo::CODE_TARGET);
1526 frame_->EmitPush(r0);
1527
1528 // Load the receiver and the existing arguments object onto the
1529 // expression stack. Avoid allocating the arguments object here.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001530 Load(receiver);
Steve Block6ded16b2010-05-10 14:33:55 +01001531 LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF);
1532
1533 // Emit the source position information after having loaded the
1534 // receiver and the arguments.
1535 CodeForSourcePosition(position);
1536 // Contents of the stack at this point:
1537 // sp[0]: arguments object of the current function or the hole.
1538 // sp[1]: receiver
1539 // sp[2]: applicand.apply
1540 // sp[3]: applicand.
1541
1542 // Check if the arguments object has been lazily allocated
1543 // already. If so, just use that instead of copying the arguments
1544 // from the stack. This also deals with cases where a local variable
1545 // named 'arguments' has been introduced.
1546 __ ldr(r0, MemOperand(sp, 0));
1547
1548 Label slow, done;
1549 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
1550 __ cmp(ip, r0);
1551 __ b(ne, &slow);
1552
1553 Label build_args;
1554 // Get rid of the arguments object probe.
1555 frame_->Drop();
1556 // Stack now has 3 elements on it.
1557 // Contents of stack at this point:
1558 // sp[0]: receiver
1559 // sp[1]: applicand.apply
1560 // sp[2]: applicand.
1561
1562 // Check that the receiver really is a JavaScript object.
1563 __ ldr(r0, MemOperand(sp, 0));
1564 __ BranchOnSmi(r0, &build_args);
1565 // We allow all JSObjects including JSFunctions. As long as
1566 // JS_FUNCTION_TYPE is the last instance type and it is right
1567 // after LAST_JS_OBJECT_TYPE, we do not have to check the upper
1568 // bound.
1569 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
1570 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
1571 __ CompareObjectType(r0, r1, r2, FIRST_JS_OBJECT_TYPE);
1572 __ b(lt, &build_args);
1573
1574 // Check that applicand.apply is Function.prototype.apply.
1575 __ ldr(r0, MemOperand(sp, kPointerSize));
1576 __ BranchOnSmi(r0, &build_args);
1577 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
1578 __ b(ne, &build_args);
1579 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
1580 Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
1581 __ ldr(r1, FieldMemOperand(r0, SharedFunctionInfo::kCodeOffset));
1582 __ cmp(r1, Operand(apply_code));
1583 __ b(ne, &build_args);
1584
1585 // Check that applicand is a function.
1586 __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
1587 __ BranchOnSmi(r1, &build_args);
1588 __ CompareObjectType(r1, r2, r3, JS_FUNCTION_TYPE);
1589 __ b(ne, &build_args);
1590
1591 // Copy the arguments to this function possibly from the
1592 // adaptor frame below it.
1593 Label invoke, adapted;
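  // An arguments adaptor frame identifies itself by storing the
  // ARGUMENTS_ADAPTOR frame type (as a smi) in its context slot.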
1594 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1595 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
1596 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1597 __ b(eq, &adapted);
1598
1599 // No arguments adaptor frame. Copy fixed number of arguments.
1600 __ mov(r0, Operand(scope()->num_parameters()));
1601 for (int i = 0; i < scope()->num_parameters(); i++) {
1602 __ ldr(r2, frame_->ParameterAt(i));
1603 __ push(r2);
1604 }
1605 __ jmp(&invoke);
1606
1607 // Arguments adaptor frame present. Copy arguments from there, but
1608 // avoid copying too many arguments to avoid stack overflows.
1609 __ bind(&adapted);
1610 static const uint32_t kArgumentsLimit = 1 * KB;
1611 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
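  // The adaptor frame stores the argument count as a smi; untag it to get
  // the actual count.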
1612 __ mov(r0, Operand(r0, LSR, kSmiTagSize));
1613 __ mov(r3, r0);
1614 __ cmp(r0, Operand(kArgumentsLimit));
1615 __ b(gt, &build_args);
1616
1617 // Loop through the arguments pushing them onto the execution
1618 // stack. We don't inform the virtual frame of the push, so we don't
1619 // have to worry about getting rid of the elements from the virtual
1620 // frame.
1621 Label loop;
1622 // r3 is a small non-negative integer, due to the test above.
1623 __ cmp(r3, Operand(0));
1624 __ b(eq, &invoke);
1625 // Compute the address of the first argument.
1626 __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2));
1627 __ add(r2, r2, Operand(kPointerSize));
1628 __ bind(&loop);
1629 // Post-decrement argument address by kPointerSize on each iteration.
1630 __ ldr(r4, MemOperand(r2, kPointerSize, NegPostIndex));
1631 __ push(r4);
1632 __ sub(r3, r3, Operand(1), SetCC);
1633 __ b(gt, &loop);
1634
1635 // Invoke the function.
1636 __ bind(&invoke);
1637 ParameterCount actual(r0);
1638 __ InvokeFunction(r1, actual, CALL_FUNCTION);
1639 // Drop applicand.apply and applicand from the stack, and push
1640 // the result of the function call, but leave the spilled frame
1641 // unchanged, with 3 elements, so it is correct when we compile the
1642 // slow-case code.
1643 __ add(sp, sp, Operand(2 * kPointerSize));
1644 __ push(r0);
1645 // Stack now has 1 element:
1646 // sp[0]: result
1647 __ jmp(&done);
1648
1649 // Slow-case: Allocate the arguments object since we know it isn't
1650 // there, and fall-through to the slow-case where we call
1651 // applicand.apply.
1652 __ bind(&build_args);
1653  // Stack now has 3 elements, because we jumped here from a point where:
1654 // sp[0]: receiver
1655 // sp[1]: applicand.apply
1656 // sp[2]: applicand.
1657 StoreArgumentsObject(false);
1658
1659 // Stack and frame now have 4 elements.
1660 __ bind(&slow);
1661
1662 // Generic computation of x.apply(y, args) with no special optimization.
1663 // Flip applicand.apply and applicand on the stack, so
1664 // applicand looks like the receiver of the applicand.apply call.
1665 // Then process it as a normal function call.
1666 __ ldr(r0, MemOperand(sp, 3 * kPointerSize));
1667 __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
Leon Clarkef7060e22010-06-03 12:02:55 +01001668 __ Strd(r0, r1, MemOperand(sp, 2 * kPointerSize));
Steve Block6ded16b2010-05-10 14:33:55 +01001669
1670 CallFunctionStub call_function(2, NOT_IN_LOOP, NO_CALL_FUNCTION_FLAGS);
1671 frame_->CallStub(&call_function, 3);
1672 // The function and its two arguments have been dropped.
1673 frame_->Drop(); // Drop the receiver as well.
1674 frame_->EmitPush(r0);
1675 // Stack now has 1 element:
1676 // sp[0]: result
1677 __ bind(&done);
1678
1679 // Restore the context register after a call.
1680 __ ldr(cp, frame_->Context());
1681}
1682
1683
Steve Blocka7e24c12009-10-30 11:49:00 +00001684void CodeGenerator::Branch(bool if_true, JumpTarget* target) {
Steve Block6ded16b2010-05-10 14:33:55 +01001685 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00001686 ASSERT(has_cc());
1687 Condition cc = if_true ? cc_reg_ : NegateCondition(cc_reg_);
1688 target->Branch(cc);
1689 cc_reg_ = al;
1690}
1691
1692
1693void CodeGenerator::CheckStack() {
Steve Block6ded16b2010-05-10 14:33:55 +01001694 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blockd0582a62009-12-15 09:54:21 +00001695 Comment cmnt(masm_, "[ check stack");
1696 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
1697 // Put the lr setup instruction in the delay slot. kInstrSize is added to
1698 // the implicit 8 byte offset that always applies to operations with pc and
1699 // gives a return address 12 bytes down.
1700 masm_->add(lr, pc, Operand(Assembler::kInstrSize));
1701 masm_->cmp(sp, Operand(ip));
1702 StackCheckStub stub;
1703 // Call the stub if lower.
1704 masm_->mov(pc,
1705 Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
1706 RelocInfo::CODE_TARGET),
1707 LeaveCC,
1708 lo);
Steve Blocka7e24c12009-10-30 11:49:00 +00001709}
1710
1711
1712void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
1713#ifdef DEBUG
1714 int original_height = frame_->height();
1715#endif
Steve Block6ded16b2010-05-10 14:33:55 +01001716 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00001717 for (int i = 0; frame_ != NULL && i < statements->length(); i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001718 Visit(statements->at(i));
Steve Blocka7e24c12009-10-30 11:49:00 +00001719 }
1720 ASSERT(!has_valid_frame() || frame_->height() == original_height);
1721}
1722
1723
1724void CodeGenerator::VisitBlock(Block* node) {
1725#ifdef DEBUG
1726 int original_height = frame_->height();
1727#endif
Steve Block6ded16b2010-05-10 14:33:55 +01001728 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00001729 Comment cmnt(masm_, "[ Block");
1730 CodeForStatementPosition(node);
Kristian Monsen25f61362010-05-21 11:50:48 +01001731 node->break_target()->SetExpectedHeight();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001732 VisitStatements(node->statements());
Steve Blocka7e24c12009-10-30 11:49:00 +00001733 if (node->break_target()->is_linked()) {
1734 node->break_target()->Bind();
1735 }
1736 node->break_target()->Unuse();
1737 ASSERT(!has_valid_frame() || frame_->height() == original_height);
1738}
1739
1740
1741void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
Steve Block3ce2e202009-11-05 08:53:23 +00001742 frame_->EmitPush(cp);
Steve Block6ded16b2010-05-10 14:33:55 +01001743 frame_->EmitPush(Operand(pairs));
1744 frame_->EmitPush(Operand(Smi::FromInt(is_eval() ? 1 : 0)));
1745
1746 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00001747 frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
1748 // The result is discarded.
1749}
1750
1751
1752void CodeGenerator::VisitDeclaration(Declaration* node) {
1753#ifdef DEBUG
1754 int original_height = frame_->height();
1755#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00001756 Comment cmnt(masm_, "[ Declaration");
1757 Variable* var = node->proxy()->var();
1758 ASSERT(var != NULL); // must have been resolved
1759 Slot* slot = var->slot();
1760
1761 // If it was not possible to allocate the variable at compile time,
1762 // we need to "declare" it at runtime to make sure it actually
1763 // exists in the local context.
1764 if (slot != NULL && slot->type() == Slot::LOOKUP) {
1765 // Variables with a "LOOKUP" slot were introduced as non-locals
1766 // during variable resolution and must have mode DYNAMIC.
1767 ASSERT(var->is_dynamic());
1768 // For now, just do a runtime call.
1769 frame_->EmitPush(cp);
Steve Block6ded16b2010-05-10 14:33:55 +01001770 frame_->EmitPush(Operand(var->name()));
Steve Blocka7e24c12009-10-30 11:49:00 +00001771 // Declaration nodes are always declared in only two modes.
1772 ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST);
1773 PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY;
Steve Block6ded16b2010-05-10 14:33:55 +01001774 frame_->EmitPush(Operand(Smi::FromInt(attr)));
Steve Blocka7e24c12009-10-30 11:49:00 +00001775 // Push initial value, if any.
1776 // Note: For variables we must not push an initial value (such as
1777 // 'undefined') because we may have a (legal) redeclaration and we
1778 // must not destroy the current value.
1779 if (node->mode() == Variable::CONST) {
Steve Block6ded16b2010-05-10 14:33:55 +01001780 frame_->EmitPushRoot(Heap::kTheHoleValueRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +00001781 } else if (node->fun() != NULL) {
Steve Block6ded16b2010-05-10 14:33:55 +01001782 Load(node->fun());
Steve Blocka7e24c12009-10-30 11:49:00 +00001783 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01001784 frame_->EmitPush(Operand(0));
Steve Blocka7e24c12009-10-30 11:49:00 +00001785 }
Steve Block6ded16b2010-05-10 14:33:55 +01001786
1787 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00001788 frame_->CallRuntime(Runtime::kDeclareContextSlot, 4);
1789 // Ignore the return value (declarations are statements).
Steve Block6ded16b2010-05-10 14:33:55 +01001790
Steve Blocka7e24c12009-10-30 11:49:00 +00001791 ASSERT(frame_->height() == original_height);
1792 return;
1793 }
1794
1795 ASSERT(!var->is_global());
1796
1797 // If we have a function or a constant, we need to initialize the variable.
1798 Expression* val = NULL;
1799 if (node->mode() == Variable::CONST) {
1800 val = new Literal(Factory::the_hole_value());
1801 } else {
1802 val = node->fun(); // NULL if we don't have a function
1803 }
1804
1805 if (val != NULL) {
Steve Block6ded16b2010-05-10 14:33:55 +01001806 // Set initial value.
1807 Reference target(this, node->proxy());
1808 Load(val);
1809 target.SetValue(NOT_CONST_INIT);
1810
Steve Blocka7e24c12009-10-30 11:49:00 +00001811 // Get rid of the assigned value (declarations are statements).
1812 frame_->Drop();
1813 }
1814 ASSERT(frame_->height() == original_height);
1815}
1816
1817
1818void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) {
1819#ifdef DEBUG
1820 int original_height = frame_->height();
1821#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00001822 Comment cmnt(masm_, "[ ExpressionStatement");
1823 CodeForStatementPosition(node);
1824 Expression* expression = node->expression();
1825 expression->MarkAsStatement();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001826 Load(expression);
Steve Blocka7e24c12009-10-30 11:49:00 +00001827 frame_->Drop();
1828 ASSERT(frame_->height() == original_height);
1829}
1830
1831
1832void CodeGenerator::VisitEmptyStatement(EmptyStatement* node) {
1833#ifdef DEBUG
1834 int original_height = frame_->height();
1835#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00001836 Comment cmnt(masm_, "// EmptyStatement");
1837 CodeForStatementPosition(node);
1838 // nothing to do
1839 ASSERT(frame_->height() == original_height);
1840}
1841
1842
1843void CodeGenerator::VisitIfStatement(IfStatement* node) {
1844#ifdef DEBUG
1845 int original_height = frame_->height();
1846#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00001847 Comment cmnt(masm_, "[ IfStatement");
1848 // Generate different code depending on which parts of the if statement
1849 // are present or not.
1850 bool has_then_stm = node->HasThenStatement();
1851 bool has_else_stm = node->HasElseStatement();
1852
1853 CodeForStatementPosition(node);
1854
1855 JumpTarget exit;
1856 if (has_then_stm && has_else_stm) {
1857 Comment cmnt(masm_, "[ IfThenElse");
1858 JumpTarget then;
1859 JumpTarget else_;
1860 // if (cond)
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001861 LoadCondition(node->condition(), &then, &else_, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00001862 if (frame_ != NULL) {
1863 Branch(false, &else_);
1864 }
1865 // then
1866 if (frame_ != NULL || then.is_linked()) {
1867 then.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001868 Visit(node->then_statement());
Steve Blocka7e24c12009-10-30 11:49:00 +00001869 }
1870 if (frame_ != NULL) {
1871 exit.Jump();
1872 }
1873 // else
1874 if (else_.is_linked()) {
1875 else_.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001876 Visit(node->else_statement());
Steve Blocka7e24c12009-10-30 11:49:00 +00001877 }
1878
1879 } else if (has_then_stm) {
1880 Comment cmnt(masm_, "[ IfThen");
1881 ASSERT(!has_else_stm);
1882 JumpTarget then;
1883 // if (cond)
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001884 LoadCondition(node->condition(), &then, &exit, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00001885 if (frame_ != NULL) {
1886 Branch(false, &exit);
1887 }
1888 // then
1889 if (frame_ != NULL || then.is_linked()) {
1890 then.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001891 Visit(node->then_statement());
Steve Blocka7e24c12009-10-30 11:49:00 +00001892 }
1893
1894 } else if (has_else_stm) {
1895 Comment cmnt(masm_, "[ IfElse");
1896 ASSERT(!has_then_stm);
1897 JumpTarget else_;
1898 // if (!cond)
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001899 LoadCondition(node->condition(), &exit, &else_, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00001900 if (frame_ != NULL) {
1901 Branch(true, &exit);
1902 }
1903 // else
1904 if (frame_ != NULL || else_.is_linked()) {
1905 else_.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001906 Visit(node->else_statement());
Steve Blocka7e24c12009-10-30 11:49:00 +00001907 }
1908
1909 } else {
1910 Comment cmnt(masm_, "[ If");
1911 ASSERT(!has_then_stm && !has_else_stm);
1912 // if (cond)
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001913 LoadCondition(node->condition(), &exit, &exit, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00001914 if (frame_ != NULL) {
1915 if (has_cc()) {
1916 cc_reg_ = al;
1917 } else {
1918 frame_->Drop();
1919 }
1920 }
1921 }
1922
1923 // end
1924 if (exit.is_linked()) {
1925 exit.Bind();
1926 }
1927 ASSERT(!has_valid_frame() || frame_->height() == original_height);
1928}
1929
1930
1931void CodeGenerator::VisitContinueStatement(ContinueStatement* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01001932 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00001933 Comment cmnt(masm_, "[ ContinueStatement");
1934 CodeForStatementPosition(node);
1935 node->target()->continue_target()->Jump();
1936}
1937
1938
1939void CodeGenerator::VisitBreakStatement(BreakStatement* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01001940 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00001941 Comment cmnt(masm_, "[ BreakStatement");
1942 CodeForStatementPosition(node);
1943 node->target()->break_target()->Jump();
1944}
1945
1946
1947void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01001948 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00001949 Comment cmnt(masm_, "[ ReturnStatement");
1950
1951 CodeForStatementPosition(node);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001952 Load(node->expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00001953 if (function_return_is_shadowed_) {
1954 frame_->EmitPop(r0);
1955 function_return_.Jump();
1956 } else {
1957 // Pop the result from the frame and prepare the frame for
1958 // returning thus making it easier to merge.
1959 frame_->EmitPop(r0);
1960 frame_->PrepareForReturn();
1961
1962 function_return_.Jump();
1963 }
1964}
1965
1966
1967void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) {
1968#ifdef DEBUG
1969 int original_height = frame_->height();
1970#endif
Steve Block6ded16b2010-05-10 14:33:55 +01001971 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00001972 Comment cmnt(masm_, "[ WithEnterStatement");
1973 CodeForStatementPosition(node);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001974 Load(node->expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00001975 if (node->is_catch_block()) {
1976 frame_->CallRuntime(Runtime::kPushCatchContext, 1);
1977 } else {
1978 frame_->CallRuntime(Runtime::kPushContext, 1);
1979 }
1980#ifdef DEBUG
1981 JumpTarget verified_true;
Steve Block6ded16b2010-05-10 14:33:55 +01001982 __ cmp(r0, cp);
Steve Blocka7e24c12009-10-30 11:49:00 +00001983 verified_true.Branch(eq);
1984 __ stop("PushContext: r0 is expected to be the same as cp");
1985 verified_true.Bind();
1986#endif
1987 // Update context local.
1988 __ str(cp, frame_->Context());
1989 ASSERT(frame_->height() == original_height);
1990}
1991
1992
1993void CodeGenerator::VisitWithExitStatement(WithExitStatement* node) {
1994#ifdef DEBUG
1995 int original_height = frame_->height();
1996#endif
Steve Block6ded16b2010-05-10 14:33:55 +01001997 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00001998 Comment cmnt(masm_, "[ WithExitStatement");
1999 CodeForStatementPosition(node);
2000 // Pop context.
2001 __ ldr(cp, ContextOperand(cp, Context::PREVIOUS_INDEX));
2002 // Update context local.
2003 __ str(cp, frame_->Context());
2004 ASSERT(frame_->height() == original_height);
2005}
2006
2007
2008void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
2009#ifdef DEBUG
2010 int original_height = frame_->height();
2011#endif
Steve Block6ded16b2010-05-10 14:33:55 +01002012 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00002013 Comment cmnt(masm_, "[ SwitchStatement");
2014 CodeForStatementPosition(node);
Kristian Monsen25f61362010-05-21 11:50:48 +01002015 node->break_target()->SetExpectedHeight();
Steve Blocka7e24c12009-10-30 11:49:00 +00002016
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002017 Load(node->tag());
Steve Blocka7e24c12009-10-30 11:49:00 +00002018
2019 JumpTarget next_test;
2020 JumpTarget fall_through;
2021 JumpTarget default_entry;
2022 JumpTarget default_exit(JumpTarget::BIDIRECTIONAL);
2023 ZoneList<CaseClause*>* cases = node->cases();
2024 int length = cases->length();
2025 CaseClause* default_clause = NULL;
2026
2027 for (int i = 0; i < length; i++) {
2028 CaseClause* clause = cases->at(i);
2029 if (clause->is_default()) {
2030 // Remember the default clause and compile it at the end.
2031 default_clause = clause;
2032 continue;
2033 }
2034
2035 Comment cmnt(masm_, "[ Case clause");
2036 // Compile the test.
2037 next_test.Bind();
2038 next_test.Unuse();
2039 // Duplicate TOS.
2040 __ ldr(r0, frame_->Top());
2041 frame_->EmitPush(r0);
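    // Pass NULL as the left expression: the duplicated switch value is
    // already on the frame, so Comparison() only loads the clause label.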
2042 Comparison(eq, NULL, clause->label(), true);
2043 Branch(false, &next_test);
2044
2045 // Before entering the body from the test, remove the switch value from
2046 // the stack.
2047 frame_->Drop();
2048
2049 // Label the body so that fall through is enabled.
2050 if (i > 0 && cases->at(i - 1)->is_default()) {
2051 default_exit.Bind();
2052 } else {
2053 fall_through.Bind();
2054 fall_through.Unuse();
2055 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002056 VisitStatements(clause->statements());
Steve Blocka7e24c12009-10-30 11:49:00 +00002057
2058 // If control flow can fall through from the body, jump to the next body
2059 // or the end of the statement.
2060 if (frame_ != NULL) {
2061 if (i < length - 1 && cases->at(i + 1)->is_default()) {
2062 default_entry.Jump();
2063 } else {
2064 fall_through.Jump();
2065 }
2066 }
2067 }
2068
2069 // The final "test" removes the switch value.
2070 next_test.Bind();
2071 frame_->Drop();
2072
2073 // If there is a default clause, compile it.
2074 if (default_clause != NULL) {
2075 Comment cmnt(masm_, "[ Default clause");
2076 default_entry.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002077 VisitStatements(default_clause->statements());
Steve Blocka7e24c12009-10-30 11:49:00 +00002078 // If control flow can fall out of the default and there is a case after
2079     // it, jump to that case's body.
2080 if (frame_ != NULL && default_exit.is_bound()) {
2081 default_exit.Jump();
2082 }
2083 }
2084
2085 if (fall_through.is_linked()) {
2086 fall_through.Bind();
2087 }
2088
2089 if (node->break_target()->is_linked()) {
2090 node->break_target()->Bind();
2091 }
2092 node->break_target()->Unuse();
2093 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2094}
2095
2096
Steve Block3ce2e202009-11-05 08:53:23 +00002097void CodeGenerator::VisitDoWhileStatement(DoWhileStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002098#ifdef DEBUG
2099 int original_height = frame_->height();
2100#endif
Steve Block6ded16b2010-05-10 14:33:55 +01002101 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Block3ce2e202009-11-05 08:53:23 +00002102 Comment cmnt(masm_, "[ DoWhileStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00002103 CodeForStatementPosition(node);
Kristian Monsen25f61362010-05-21 11:50:48 +01002104 node->break_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002105 JumpTarget body(JumpTarget::BIDIRECTIONAL);
Steve Block6ded16b2010-05-10 14:33:55 +01002106 IncrementLoopNesting();
Steve Blocka7e24c12009-10-30 11:49:00 +00002107
Steve Block3ce2e202009-11-05 08:53:23 +00002108 // Label the top of the loop for the backward CFG edge. If the test
2109 // is always true we can use the continue target, and if the test is
2110 // always false there is no need.
2111 ConditionAnalysis info = AnalyzeCondition(node->cond());
2112 switch (info) {
2113 case ALWAYS_TRUE:
Kristian Monsen25f61362010-05-21 11:50:48 +01002114 node->continue_target()->SetExpectedHeight();
Steve Blocka7e24c12009-10-30 11:49:00 +00002115 node->continue_target()->Bind();
Steve Block3ce2e202009-11-05 08:53:23 +00002116 break;
2117 case ALWAYS_FALSE:
Kristian Monsen25f61362010-05-21 11:50:48 +01002118 node->continue_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002119 break;
2120 case DONT_KNOW:
Kristian Monsen25f61362010-05-21 11:50:48 +01002121 node->continue_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002122 body.Bind();
2123 break;
2124 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002125
Steve Block3ce2e202009-11-05 08:53:23 +00002126 CheckStack(); // TODO(1222600): ignore if body contains calls.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002127 Visit(node->body());
Steve Blocka7e24c12009-10-30 11:49:00 +00002128
Steve Blockd0582a62009-12-15 09:54:21 +00002129 // Compile the test.
Steve Block3ce2e202009-11-05 08:53:23 +00002130 switch (info) {
2131 case ALWAYS_TRUE:
2132 // If control can fall off the end of the body, jump back to the
2133 // top.
Steve Blocka7e24c12009-10-30 11:49:00 +00002134 if (has_valid_frame()) {
Steve Block3ce2e202009-11-05 08:53:23 +00002135 node->continue_target()->Jump();
Steve Blocka7e24c12009-10-30 11:49:00 +00002136 }
2137 break;
Steve Block3ce2e202009-11-05 08:53:23 +00002138 case ALWAYS_FALSE:
2139 // If we have a continue in the body, we only have to bind its
2140 // jump target.
2141 if (node->continue_target()->is_linked()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002142 node->continue_target()->Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00002143 }
Steve Block3ce2e202009-11-05 08:53:23 +00002144 break;
2145 case DONT_KNOW:
2146 // We have to compile the test expression if it can be reached by
2147 // control flow falling out of the body or via continue.
2148 if (node->continue_target()->is_linked()) {
2149 node->continue_target()->Bind();
2150 }
2151 if (has_valid_frame()) {
Steve Blockd0582a62009-12-15 09:54:21 +00002152 Comment cmnt(masm_, "[ DoWhileCondition");
2153 CodeForDoWhileConditionPosition(node);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002154 LoadCondition(node->cond(), &body, node->break_target(), true);
Steve Blocka7e24c12009-10-30 11:49:00 +00002155 if (has_valid_frame()) {
Steve Block3ce2e202009-11-05 08:53:23 +00002156          // An invalid frame here indicates that control did not
2157 // fall out of the test expression.
2158 Branch(true, &body);
Steve Blocka7e24c12009-10-30 11:49:00 +00002159 }
2160 }
2161 break;
Steve Blocka7e24c12009-10-30 11:49:00 +00002162 }
2163
2164 if (node->break_target()->is_linked()) {
2165 node->break_target()->Bind();
2166 }
Steve Block6ded16b2010-05-10 14:33:55 +01002167 DecrementLoopNesting();
Steve Block3ce2e202009-11-05 08:53:23 +00002168 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2169}
2170
2171
2172void CodeGenerator::VisitWhileStatement(WhileStatement* node) {
2173#ifdef DEBUG
2174 int original_height = frame_->height();
2175#endif
Steve Block6ded16b2010-05-10 14:33:55 +01002176 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Block3ce2e202009-11-05 08:53:23 +00002177 Comment cmnt(masm_, "[ WhileStatement");
2178 CodeForStatementPosition(node);
2179
2180 // If the test is never true and has no side effects there is no need
2181 // to compile the test or body.
2182 ConditionAnalysis info = AnalyzeCondition(node->cond());
2183 if (info == ALWAYS_FALSE) return;
2184
Kristian Monsen25f61362010-05-21 11:50:48 +01002185 node->break_target()->SetExpectedHeight();
Steve Block6ded16b2010-05-10 14:33:55 +01002186 IncrementLoopNesting();
Steve Block3ce2e202009-11-05 08:53:23 +00002187
2188 // Label the top of the loop with the continue target for the backward
2189 // CFG edge.
Kristian Monsen25f61362010-05-21 11:50:48 +01002190 node->continue_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002191 node->continue_target()->Bind();
2192
2193 if (info == DONT_KNOW) {
2194 JumpTarget body;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002195 LoadCondition(node->cond(), &body, node->break_target(), true);
Steve Block3ce2e202009-11-05 08:53:23 +00002196 if (has_valid_frame()) {
2197 // A NULL frame indicates that control did not fall out of the
2198 // test expression.
2199 Branch(false, node->break_target());
2200 }
2201 if (has_valid_frame() || body.is_linked()) {
2202 body.Bind();
2203 }
2204 }
2205
2206 if (has_valid_frame()) {
2207 CheckStack(); // TODO(1222600): ignore if body contains calls.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002208 Visit(node->body());
Steve Block3ce2e202009-11-05 08:53:23 +00002209
2210 // If control flow can fall out of the body, jump back to the top.
2211 if (has_valid_frame()) {
2212 node->continue_target()->Jump();
2213 }
2214 }
2215 if (node->break_target()->is_linked()) {
2216 node->break_target()->Bind();
2217 }
Steve Block6ded16b2010-05-10 14:33:55 +01002218 DecrementLoopNesting();
Steve Block3ce2e202009-11-05 08:53:23 +00002219 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2220}
2221
2222
2223void CodeGenerator::VisitForStatement(ForStatement* node) {
2224#ifdef DEBUG
2225 int original_height = frame_->height();
2226#endif
Steve Block6ded16b2010-05-10 14:33:55 +01002227 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Block3ce2e202009-11-05 08:53:23 +00002228 Comment cmnt(masm_, "[ ForStatement");
2229 CodeForStatementPosition(node);
2230 if (node->init() != NULL) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002231 Visit(node->init());
Steve Block3ce2e202009-11-05 08:53:23 +00002232 }
2233
2234 // If the test is never true there is no need to compile the test or
2235 // body.
2236 ConditionAnalysis info = AnalyzeCondition(node->cond());
2237 if (info == ALWAYS_FALSE) return;
2238
Kristian Monsen25f61362010-05-21 11:50:48 +01002239 node->break_target()->SetExpectedHeight();
Steve Block6ded16b2010-05-10 14:33:55 +01002240 IncrementLoopNesting();
Steve Block3ce2e202009-11-05 08:53:23 +00002241
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002242 // We know that the loop index is a smi if it is not modified in the
2243 // loop body and it is checked against a constant limit in the loop
2244 // condition. In this case, we reset the static type information of the
2245 // loop index to smi before compiling the body, the update expression, and
2246 // the bottom check of the loop condition.
2247 TypeInfoCodeGenState type_info_scope(this,
2248 node->is_fast_smi_loop() ?
2249 node->loop_variable()->slot() :
2250 NULL,
2251 TypeInfo::Smi());
2252
Steve Block3ce2e202009-11-05 08:53:23 +00002253 // If there is no update statement, label the top of the loop with the
2254 // continue target, otherwise with the loop target.
2255 JumpTarget loop(JumpTarget::BIDIRECTIONAL);
2256 if (node->next() == NULL) {
Kristian Monsen25f61362010-05-21 11:50:48 +01002257 node->continue_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002258 node->continue_target()->Bind();
2259 } else {
Kristian Monsen25f61362010-05-21 11:50:48 +01002260 node->continue_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002261 loop.Bind();
2262 }
2263
2264 // If the test is always true, there is no need to compile it.
2265 if (info == DONT_KNOW) {
2266 JumpTarget body;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002267 LoadCondition(node->cond(), &body, node->break_target(), true);
Steve Block3ce2e202009-11-05 08:53:23 +00002268 if (has_valid_frame()) {
2269 Branch(false, node->break_target());
2270 }
2271 if (has_valid_frame() || body.is_linked()) {
2272 body.Bind();
2273 }
2274 }
2275
2276 if (has_valid_frame()) {
2277 CheckStack(); // TODO(1222600): ignore if body contains calls.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002278 Visit(node->body());
Steve Block3ce2e202009-11-05 08:53:23 +00002279
2280 if (node->next() == NULL) {
2281 // If there is no update statement and control flow can fall out
2282 // of the loop, jump directly to the continue label.
2283 if (has_valid_frame()) {
2284 node->continue_target()->Jump();
2285 }
2286 } else {
2287 // If there is an update statement and control flow can reach it
2288 // via falling out of the body of the loop or continuing, we
2289 // compile the update statement.
2290 if (node->continue_target()->is_linked()) {
2291 node->continue_target()->Bind();
2292 }
2293 if (has_valid_frame()) {
2294        // Record the source position of the statement, since this code
2295        // (which comes after the code for the body) belongs to the loop
2296        // statement rather than to the body.
2297 CodeForStatementPosition(node);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002298 Visit(node->next());
Steve Block3ce2e202009-11-05 08:53:23 +00002299 loop.Jump();
2300 }
2301 }
2302 }
2303 if (node->break_target()->is_linked()) {
2304 node->break_target()->Bind();
2305 }
Steve Block6ded16b2010-05-10 14:33:55 +01002306 DecrementLoopNesting();
Steve Blocka7e24c12009-10-30 11:49:00 +00002307 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2308}
2309
2310
2311void CodeGenerator::VisitForInStatement(ForInStatement* node) {
2312#ifdef DEBUG
2313 int original_height = frame_->height();
2314#endif
Steve Block6ded16b2010-05-10 14:33:55 +01002315 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00002316 Comment cmnt(masm_, "[ ForInStatement");
2317 CodeForStatementPosition(node);
2318
2319 JumpTarget primitive;
2320 JumpTarget jsobject;
2321 JumpTarget fixed_array;
2322 JumpTarget entry(JumpTarget::BIDIRECTIONAL);
2323 JumpTarget end_del_check;
2324 JumpTarget exit;
2325
2326 // Get the object to enumerate over (converted to JSObject).
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002327 Load(node->enumerable());
Steve Blocka7e24c12009-10-30 11:49:00 +00002328
2329 // Both SpiderMonkey and kjs ignore null and undefined in contrast
2330 // to the specification. 12.6.4 mandates a call to ToObject.
2331 frame_->EmitPop(r0);
2332 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
2333 __ cmp(r0, ip);
2334 exit.Branch(eq);
2335 __ LoadRoot(ip, Heap::kNullValueRootIndex);
2336 __ cmp(r0, ip);
2337 exit.Branch(eq);
2338
2339 // Stack layout in body:
2340 // [iteration counter (Smi)]
2341 // [length of array]
2342 // [FixedArray]
2343 // [Map or 0]
2344 // [Object]
2345
2346 // Check if enumerable is already a JSObject
2347 __ tst(r0, Operand(kSmiTagMask));
2348 primitive.Branch(eq);
2349 __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
2350 jsobject.Branch(hs);
2351
2352 primitive.Bind();
2353 frame_->EmitPush(r0);
Steve Blockd0582a62009-12-15 09:54:21 +00002354 frame_->InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS, 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00002355
2356 jsobject.Bind();
2357 // Get the set of properties (as a FixedArray or Map).
Steve Blockd0582a62009-12-15 09:54:21 +00002358 // r0: value to be iterated over
2359 frame_->EmitPush(r0); // Push the object being iterated over.
2360
2361 // Check cache validity in generated code. This is a fast case for
2362 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
2363 // guarantee cache validity, call the runtime system to check cache
2364 // validity or get the property names in a fixed array.
2365 JumpTarget call_runtime;
2366 JumpTarget loop(JumpTarget::BIDIRECTIONAL);
2367 JumpTarget check_prototype;
2368 JumpTarget use_cache;
2369 __ mov(r1, Operand(r0));
2370 loop.Bind();
2371 // Check that there are no elements.
2372 __ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
2373 __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex);
2374 __ cmp(r2, r4);
2375 call_runtime.Branch(ne);
2376 // Check that instance descriptors are not empty so that we can
2377 // check for an enum cache. Leave the map in r3 for the subsequent
2378 // prototype load.
2379 __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
2380 __ ldr(r2, FieldMemOperand(r3, Map::kInstanceDescriptorsOffset));
2381 __ LoadRoot(ip, Heap::kEmptyDescriptorArrayRootIndex);
2382 __ cmp(r2, ip);
2383 call_runtime.Branch(eq);
2384  // Check that there is an enum cache in the non-empty instance
2385 // descriptors. This is the case if the next enumeration index
2386 // field does not contain a smi.
2387 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumerationIndexOffset));
2388 __ tst(r2, Operand(kSmiTagMask));
2389 call_runtime.Branch(eq);
2390 // For all objects but the receiver, check that the cache is empty.
2391 // r4: empty fixed array root.
2392 __ cmp(r1, r0);
2393 check_prototype.Branch(eq);
2394 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));
2395 __ cmp(r2, r4);
2396 call_runtime.Branch(ne);
2397 check_prototype.Bind();
2398 // Load the prototype from the map and loop if non-null.
2399 __ ldr(r1, FieldMemOperand(r3, Map::kPrototypeOffset));
2400 __ LoadRoot(ip, Heap::kNullValueRootIndex);
2401 __ cmp(r1, ip);
2402 loop.Branch(ne);
2403 // The enum cache is valid. Load the map of the object being
2404 // iterated over and use the cache for the iteration.
2405 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
2406 use_cache.Jump();
2407
2408 call_runtime.Bind();
2409 // Call the runtime to get the property names for the object.
2410 frame_->EmitPush(r0); // push the object (slot 4) for the runtime call
Steve Blocka7e24c12009-10-30 11:49:00 +00002411 frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1);
2412
Steve Blockd0582a62009-12-15 09:54:21 +00002413 // If we got a map from the runtime call, we can do a fast
2414 // modification check. Otherwise, we got a fixed array, and we have
2415 // to do a slow check.
2416 // r0: map or fixed array (result from call to
2417 // Runtime::kGetPropertyNamesFast)
Steve Blocka7e24c12009-10-30 11:49:00 +00002418 __ mov(r2, Operand(r0));
2419 __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
2420 __ LoadRoot(ip, Heap::kMetaMapRootIndex);
2421 __ cmp(r1, ip);
2422 fixed_array.Branch(ne);
2423
Steve Blockd0582a62009-12-15 09:54:21 +00002424 use_cache.Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00002425 // Get enum cache
Steve Blockd0582a62009-12-15 09:54:21 +00002426 // r0: map (either the result from a call to
2427 // Runtime::kGetPropertyNamesFast or has been fetched directly from
2428 // the object)
Steve Blocka7e24c12009-10-30 11:49:00 +00002429 __ mov(r1, Operand(r0));
2430 __ ldr(r1, FieldMemOperand(r1, Map::kInstanceDescriptorsOffset));
2431 __ ldr(r1, FieldMemOperand(r1, DescriptorArray::kEnumerationIndexOffset));
2432 __ ldr(r2,
2433 FieldMemOperand(r1, DescriptorArray::kEnumCacheBridgeCacheOffset));
2434
2435 frame_->EmitPush(r0); // map
2436 frame_->EmitPush(r2); // enum cache bridge cache
2437 __ ldr(r0, FieldMemOperand(r2, FixedArray::kLengthOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00002438 frame_->EmitPush(r0);
2439 __ mov(r0, Operand(Smi::FromInt(0)));
2440 frame_->EmitPush(r0);
2441 entry.Jump();
2442
2443 fixed_array.Bind();
2444 __ mov(r1, Operand(Smi::FromInt(0)));
2445 frame_->EmitPush(r1); // insert 0 in place of Map
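  // The smi zero pushed in place of the map never matches the enumerable's
  // map, so every key taken from the fixed array is sent through the
  // FILTER_KEY builtin below.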
2446 frame_->EmitPush(r0);
2447
2448 // Push the length of the array and the initial index onto the stack.
2449 __ ldr(r0, FieldMemOperand(r0, FixedArray::kLengthOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00002450 frame_->EmitPush(r0);
2451 __ mov(r0, Operand(Smi::FromInt(0))); // init index
2452 frame_->EmitPush(r0);
2453
2454 // Condition.
2455 entry.Bind();
2456 // sp[0] : index
2457 // sp[1] : array/enum cache length
2458 // sp[2] : array or enum cache
2459 // sp[3] : 0 or map
2460 // sp[4] : enumerable
2461 // Grab the current frame's height for the break and continue
2462 // targets only after all the state is pushed on the frame.
Kristian Monsen25f61362010-05-21 11:50:48 +01002463 node->break_target()->SetExpectedHeight();
2464 node->continue_target()->SetExpectedHeight();
Steve Blocka7e24c12009-10-30 11:49:00 +00002465
Kristian Monsen25f61362010-05-21 11:50:48 +01002466 // Load the current count to r0, load the length to r1.
Leon Clarkef7060e22010-06-03 12:02:55 +01002467 __ Ldrd(r0, r1, frame_->ElementAt(0));
Steve Block6ded16b2010-05-10 14:33:55 +01002468 __ cmp(r0, r1); // compare to the array length
Steve Blocka7e24c12009-10-30 11:49:00 +00002469 node->break_target()->Branch(hs);
2470
Steve Blocka7e24c12009-10-30 11:49:00 +00002471 // Get the i'th entry of the array.
2472 __ ldr(r2, frame_->ElementAt(2));
2473 __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
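  // r0 holds the index as a smi, so scaling it by
  // kPointerSizeLog2 - kSmiTagSize turns the tagged index directly into a
  // byte offset into the fixed array.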
2474 __ ldr(r3, MemOperand(r2, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
2475
2476 // Get Map or 0.
2477 __ ldr(r2, frame_->ElementAt(3));
2478 // Check if this (still) matches the map of the enumerable.
2479 // If not, we have to filter the key.
2480 __ ldr(r1, frame_->ElementAt(4));
2481 __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset));
2482 __ cmp(r1, Operand(r2));
2483 end_del_check.Branch(eq);
2484
2485 // Convert the entry to a string (or null if it isn't a property anymore).
2486 __ ldr(r0, frame_->ElementAt(4)); // push enumerable
2487 frame_->EmitPush(r0);
2488 frame_->EmitPush(r3); // push entry
Steve Blockd0582a62009-12-15 09:54:21 +00002489 frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_JS, 2);
Steve Blocka7e24c12009-10-30 11:49:00 +00002490 __ mov(r3, Operand(r0));
2491
2492 // If the property has been removed while iterating, we just skip it.
2493 __ LoadRoot(ip, Heap::kNullValueRootIndex);
2494 __ cmp(r3, ip);
2495 node->continue_target()->Branch(eq);
2496
2497 end_del_check.Bind();
2498 // Store the entry in the 'each' expression and take another spin in the
2499  // loop. r3: i'th entry of the enum cache (or string thereof)
2500 frame_->EmitPush(r3); // push entry
2501 { Reference each(this, node->each());
2502 if (!each.is_illegal()) {
2503 if (each.size() > 0) {
2504 __ ldr(r0, frame_->ElementAt(each.size()));
2505 frame_->EmitPush(r0);
Leon Clarked91b9f72010-01-27 17:25:45 +00002506 each.SetValue(NOT_CONST_INIT);
2507 frame_->Drop(2);
2508 } else {
2509 // If the reference was to a slot we rely on the convenient property
2510 // that it doesn't matter whether a value (eg, r3 pushed above) is
2511 // right on top of or right underneath a zero-sized reference.
2512 each.SetValue(NOT_CONST_INIT);
2513 frame_->Drop();
Steve Blocka7e24c12009-10-30 11:49:00 +00002514 }
2515 }
2516 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002517 // Body.
2518 CheckStack(); // TODO(1222600): ignore if body contains calls.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002519 Visit(node->body());
Steve Blocka7e24c12009-10-30 11:49:00 +00002520
2521 // Next. Reestablish a spilled frame in case we are coming here via
2522 // a continue in the body.
2523 node->continue_target()->Bind();
2524 frame_->SpillAll();
2525 frame_->EmitPop(r0);
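  // The smi tag is zero, so adding the tagged constant 1 increments the
  // untagged index by one.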
2526 __ add(r0, r0, Operand(Smi::FromInt(1)));
2527 frame_->EmitPush(r0);
2528 entry.Jump();
2529
2530 // Cleanup. No need to spill because VirtualFrame::Drop is safe for
2531 // any frame.
2532 node->break_target()->Bind();
2533 frame_->Drop(5);
2534
2535 // Exit.
2536 exit.Bind();
2537 node->continue_target()->Unuse();
2538 node->break_target()->Unuse();
2539 ASSERT(frame_->height() == original_height);
2540}
2541
2542
Steve Block3ce2e202009-11-05 08:53:23 +00002543void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002544#ifdef DEBUG
2545 int original_height = frame_->height();
2546#endif
Steve Block6ded16b2010-05-10 14:33:55 +01002547 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Block3ce2e202009-11-05 08:53:23 +00002548 Comment cmnt(masm_, "[ TryCatchStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00002549 CodeForStatementPosition(node);
2550
2551 JumpTarget try_block;
2552 JumpTarget exit;
2553
2554 try_block.Call();
2555 // --- Catch block ---
2556 frame_->EmitPush(r0);
2557
2558 // Store the caught exception in the catch variable.
Leon Clarkee46be812010-01-19 14:06:41 +00002559 Variable* catch_var = node->catch_var()->var();
2560 ASSERT(catch_var != NULL && catch_var->slot() != NULL);
2561 StoreToSlot(catch_var->slot(), NOT_CONST_INIT);
Steve Blocka7e24c12009-10-30 11:49:00 +00002562
2563 // Remove the exception from the stack.
2564 frame_->Drop();
2565
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002566 VisitStatements(node->catch_block()->statements());
Steve Blocka7e24c12009-10-30 11:49:00 +00002567 if (frame_ != NULL) {
2568 exit.Jump();
2569 }
2570
2571
2572 // --- Try block ---
2573 try_block.Bind();
2574
2575 frame_->PushTryHandler(TRY_CATCH_HANDLER);
2576 int handler_height = frame_->height();
2577
2578 // Shadow the labels for all escapes from the try block, including
2579 // returns. During shadowing, the original label is hidden as the
2580 // LabelShadow and operations on the original actually affect the
2581 // shadowing label.
2582 //
2583 // We should probably try to unify the escaping labels and the return
2584 // label.
2585 int nof_escapes = node->escaping_targets()->length();
2586 List<ShadowTarget*> shadows(1 + nof_escapes);
2587
2588 // Add the shadow target for the function return.
2589 static const int kReturnShadowIndex = 0;
2590 shadows.Add(new ShadowTarget(&function_return_));
2591 bool function_return_was_shadowed = function_return_is_shadowed_;
2592 function_return_is_shadowed_ = true;
2593 ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);
2594
2595 // Add the remaining shadow targets.
2596 for (int i = 0; i < nof_escapes; i++) {
2597 shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
2598 }
2599
2600 // Generate code for the statements in the try block.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002601 VisitStatements(node->try_block()->statements());
Steve Blocka7e24c12009-10-30 11:49:00 +00002602
2603 // Stop the introduced shadowing and count the number of required unlinks.
2604 // After shadowing stops, the original labels are unshadowed and the
2605 // LabelShadows represent the formerly shadowing labels.
2606 bool has_unlinks = false;
2607 for (int i = 0; i < shadows.length(); i++) {
2608 shadows[i]->StopShadowing();
2609 has_unlinks = has_unlinks || shadows[i]->is_linked();
2610 }
2611 function_return_is_shadowed_ = function_return_was_shadowed;
2612
2613 // Get an external reference to the handler address.
2614 ExternalReference handler_address(Top::k_handler_address);
2615
2616 // If we can fall off the end of the try block, unlink from try chain.
2617 if (has_valid_frame()) {
2618 // The next handler address is on top of the frame. Unlink from
2619 // the handler list and drop the rest of this handler from the
2620 // frame.
2621 ASSERT(StackHandlerConstants::kNextOffset == 0);
2622 frame_->EmitPop(r1);
2623 __ mov(r3, Operand(handler_address));
2624 __ str(r1, MemOperand(r3));
2625 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2626 if (has_unlinks) {
2627 exit.Jump();
2628 }
2629 }
2630
2631 // Generate unlink code for the (formerly) shadowing labels that have been
2632 // jumped to. Deallocate each shadow target.
2633 for (int i = 0; i < shadows.length(); i++) {
2634 if (shadows[i]->is_linked()) {
2635      // Unlink from the try chain.
2636 shadows[i]->Bind();
2637 // Because we can be jumping here (to spilled code) from unspilled
2638 // code, we need to reestablish a spilled frame at this block.
2639 frame_->SpillAll();
2640
2641 // Reload sp from the top handler, because some statements that we
2642 // break from (eg, for...in) may have left stuff on the stack.
2643 __ mov(r3, Operand(handler_address));
2644 __ ldr(sp, MemOperand(r3));
2645 frame_->Forget(frame_->height() - handler_height);
2646
2647 ASSERT(StackHandlerConstants::kNextOffset == 0);
2648 frame_->EmitPop(r1);
2649 __ str(r1, MemOperand(r3));
2650 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2651
2652 if (!function_return_is_shadowed_ && i == kReturnShadowIndex) {
2653 frame_->PrepareForReturn();
2654 }
2655 shadows[i]->other_target()->Jump();
2656 }
2657 }
2658
2659 exit.Bind();
2660 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2661}
2662
2663
Steve Block3ce2e202009-11-05 08:53:23 +00002664void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002665#ifdef DEBUG
2666 int original_height = frame_->height();
2667#endif
Steve Block6ded16b2010-05-10 14:33:55 +01002668 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Block3ce2e202009-11-05 08:53:23 +00002669 Comment cmnt(masm_, "[ TryFinallyStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00002670 CodeForStatementPosition(node);
2671
2672 // State: Used to keep track of reason for entering the finally
2673 // block. Should probably be extended to hold information for
2674 // break/continue from within the try block.
2675 enum { FALLING, THROWING, JUMPING };
2676
2677 JumpTarget try_block;
2678 JumpTarget finally_block;
2679
2680 try_block.Call();
2681
2682 frame_->EmitPush(r0); // save exception object on the stack
2683 // In case of thrown exceptions, this is where we continue.
2684 __ mov(r2, Operand(Smi::FromInt(THROWING)));
2685 finally_block.Jump();
2686
2687 // --- Try block ---
2688 try_block.Bind();
2689
2690 frame_->PushTryHandler(TRY_FINALLY_HANDLER);
2691 int handler_height = frame_->height();
2692
2693 // Shadow the labels for all escapes from the try block, including
2694 // returns. Shadowing hides the original label as the LabelShadow and
2695 // operations on the original actually affect the shadowing label.
2696 //
2697 // We should probably try to unify the escaping labels and the return
2698 // label.
2699 int nof_escapes = node->escaping_targets()->length();
2700 List<ShadowTarget*> shadows(1 + nof_escapes);
2701
2702 // Add the shadow target for the function return.
2703 static const int kReturnShadowIndex = 0;
2704 shadows.Add(new ShadowTarget(&function_return_));
2705 bool function_return_was_shadowed = function_return_is_shadowed_;
2706 function_return_is_shadowed_ = true;
2707 ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);
2708
2709 // Add the remaining shadow targets.
2710 for (int i = 0; i < nof_escapes; i++) {
2711 shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
2712 }
2713
2714 // Generate code for the statements in the try block.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002715 VisitStatements(node->try_block()->statements());
Steve Blocka7e24c12009-10-30 11:49:00 +00002716
2717 // Stop the introduced shadowing and count the number of required unlinks.
2718 // After shadowing stops, the original labels are unshadowed and the
2719 // LabelShadows represent the formerly shadowing labels.
2720 int nof_unlinks = 0;
2721 for (int i = 0; i < shadows.length(); i++) {
2722 shadows[i]->StopShadowing();
2723 if (shadows[i]->is_linked()) nof_unlinks++;
2724 }
2725 function_return_is_shadowed_ = function_return_was_shadowed;
2726
2727 // Get an external reference to the handler address.
2728 ExternalReference handler_address(Top::k_handler_address);
2729
2730 // If we can fall off the end of the try block, unlink from the try
2731 // chain and set the state on the frame to FALLING.
2732 if (has_valid_frame()) {
2733 // The next handler address is on top of the frame.
2734 ASSERT(StackHandlerConstants::kNextOffset == 0);
2735 frame_->EmitPop(r1);
2736 __ mov(r3, Operand(handler_address));
2737 __ str(r1, MemOperand(r3));
2738 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2739
2740 // Fake a top of stack value (unneeded when FALLING) and set the
2741 // state in r2, then jump around the unlink blocks if any.
2742 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2743 frame_->EmitPush(r0);
2744 __ mov(r2, Operand(Smi::FromInt(FALLING)));
2745 if (nof_unlinks > 0) {
2746 finally_block.Jump();
2747 }
2748 }
2749
2750 // Generate code to unlink and set the state for the (formerly)
2751 // shadowing targets that have been jumped to.
2752 for (int i = 0; i < shadows.length(); i++) {
2753 if (shadows[i]->is_linked()) {
2754 // If we have come from the shadowed return, the return value is
2755 // in (a non-refcounted reference to) r0. We must preserve it
2756 // until it is pushed.
2757 //
2758 // Because we can be jumping here (to spilled code) from
2759 // unspilled code, we need to reestablish a spilled frame at
2760 // this block.
2761 shadows[i]->Bind();
2762 frame_->SpillAll();
2763
2764 // Reload sp from the top handler, because some statements that
2765      // we break from (e.g. for...in) may have left stuff on the
2766 // stack.
2767 __ mov(r3, Operand(handler_address));
2768 __ ldr(sp, MemOperand(r3));
2769 frame_->Forget(frame_->height() - handler_height);
2770
2771 // Unlink this handler and drop it from the frame. The next
2772 // handler address is currently on top of the frame.
2773 ASSERT(StackHandlerConstants::kNextOffset == 0);
2774 frame_->EmitPop(r1);
2775 __ str(r1, MemOperand(r3));
2776 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2777
2778 if (i == kReturnShadowIndex) {
2779 // If this label shadowed the function return, materialize the
2780 // return value on the stack.
2781 frame_->EmitPush(r0);
2782 } else {
2783 // Fake TOS for targets that shadowed breaks and continues.
2784 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2785 frame_->EmitPush(r0);
2786 }
2787 __ mov(r2, Operand(Smi::FromInt(JUMPING + i)));
2788 if (--nof_unlinks > 0) {
2789 // If this is not the last unlink block, jump around the next.
2790 finally_block.Jump();
2791 }
2792 }
2793 }
2794
2795 // --- Finally block ---
2796 finally_block.Bind();
2797
2798 // Push the state on the stack.
2799 frame_->EmitPush(r2);
2800
2801 // We keep two elements on the stack - the (possibly faked) result
2802 // and the state - while evaluating the finally block.
2803 //
2804 // Generate code for the statements in the finally block.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002805 VisitStatements(node->finally_block()->statements());
Steve Blocka7e24c12009-10-30 11:49:00 +00002806
2807 if (has_valid_frame()) {
2808 // Restore state and return value or faked TOS.
2809 frame_->EmitPop(r2);
2810 frame_->EmitPop(r0);
2811 }
2812
2813 // Generate code to jump to the right destination for all used
2814 // formerly shadowing targets. Deallocate each shadow target.
2815 for (int i = 0; i < shadows.length(); i++) {
2816 if (has_valid_frame() && shadows[i]->is_bound()) {
2817 JumpTarget* original = shadows[i]->other_target();
2818 __ cmp(r2, Operand(Smi::FromInt(JUMPING + i)));
2819 if (!function_return_is_shadowed_ && i == kReturnShadowIndex) {
2820 JumpTarget skip;
2821 skip.Branch(ne);
2822 frame_->PrepareForReturn();
2823 original->Jump();
2824 skip.Bind();
2825 } else {
2826 original->Branch(eq);
2827 }
2828 }
2829 }
2830
2831 if (has_valid_frame()) {
2832 // Check if we need to rethrow the exception.
2833 JumpTarget exit;
2834 __ cmp(r2, Operand(Smi::FromInt(THROWING)));
2835 exit.Branch(ne);
2836
2837 // Rethrow exception.
2838 frame_->EmitPush(r0);
2839 frame_->CallRuntime(Runtime::kReThrow, 1);
2840
2841 // Done.
2842 exit.Bind();
2843 }
2844 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2845}
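
// Illustrative sketch, not part of the V8 sources: the finally code generated
// above leaves a small state value in r2 and a (possibly faked) result on the
// stack, runs the finally body, and then re-dispatches on that state.  All
// names below are invented for illustration; only the FALLING/THROWING/
// JUMPING encoding mirrors the code above.
static const int kSketchFalling = 0;
static const int kSketchThrowing = 1;
static const int kSketchJumping = 2;  // JUMPING + i selects shadow target i.

// Returns -2 for fall-through, -1 for rethrow, otherwise the index of the
// shadowed break/continue/return target to resume.
static int SketchDispatchAfterFinally(int state) {
  if (state == kSketchFalling) return -2;
  if (state == kSketchThrowing) return -1;
  return state - kSketchJumping;
}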
2846
2847
2848void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) {
2849#ifdef DEBUG
2850 int original_height = frame_->height();
2851#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00002852  Comment cmnt(masm_, "[ DebuggerStatement");
2853 CodeForStatementPosition(node);
2854#ifdef ENABLE_DEBUGGER_SUPPORT
Andrei Popescu402d9372010-02-26 13:31:12 +00002855 frame_->DebugBreak();
Steve Blocka7e24c12009-10-30 11:49:00 +00002856#endif
2857 // Ignore the return value.
2858 ASSERT(frame_->height() == original_height);
2859}
2860
2861
Steve Block6ded16b2010-05-10 14:33:55 +01002862void CodeGenerator::InstantiateFunction(
2863 Handle<SharedFunctionInfo> function_info) {
Leon Clarkee46be812010-01-19 14:06:41 +00002864 // Use the fast case closure allocation code that allocates in new
2865 // space for nested functions that don't need literals cloning.
Steve Block6ded16b2010-05-10 14:33:55 +01002866 if (scope()->is_function_scope() && function_info->num_literals() == 0) {
Leon Clarkee46be812010-01-19 14:06:41 +00002867 FastNewClosureStub stub;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002868 frame_->EmitPush(Operand(function_info));
2869 frame_->SpillAll();
Leon Clarkee46be812010-01-19 14:06:41 +00002870 frame_->CallStub(&stub, 1);
2871 frame_->EmitPush(r0);
2872 } else {
2873 // Create a new closure.
2874 frame_->EmitPush(cp);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002875 frame_->EmitPush(Operand(function_info));
Leon Clarkee46be812010-01-19 14:06:41 +00002876 frame_->CallRuntime(Runtime::kNewClosure, 2);
2877 frame_->EmitPush(r0);
2878 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002879}
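
// Illustrative sketch, not part of the V8 sources: the choice above between
// the FastNewClosureStub path and the Runtime::kNewClosure fallback reduces
// to the predicate below.  The struct and function names are hypothetical and
// only model the two inputs the real code consults.
struct SketchSharedInfo { int num_literals; };

static bool SketchUseFastClosureAllocation(bool in_function_scope,
                                           const SketchSharedInfo& info) {
  // Fast case: a nested function in a function scope with no literals that
  // would need cloning; everything else goes through the runtime call.
  return in_function_scope && info.num_literals == 0;
}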
2880
2881
2882void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
2883#ifdef DEBUG
2884 int original_height = frame_->height();
2885#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00002886 Comment cmnt(masm_, "[ FunctionLiteral");
2887
Steve Block6ded16b2010-05-10 14:33:55 +01002888 // Build the function info and instantiate it.
2889 Handle<SharedFunctionInfo> function_info =
2890 Compiler::BuildFunctionInfo(node, script(), this);
Steve Blocka7e24c12009-10-30 11:49:00 +00002891 // Check for stack-overflow exception.
2892 if (HasStackOverflow()) {
2893 ASSERT(frame_->height() == original_height);
2894 return;
2895 }
Steve Block6ded16b2010-05-10 14:33:55 +01002896 InstantiateFunction(function_info);
2897 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00002898}
2899
2900
Steve Block6ded16b2010-05-10 14:33:55 +01002901void CodeGenerator::VisitSharedFunctionInfoLiteral(
2902 SharedFunctionInfoLiteral* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002903#ifdef DEBUG
2904 int original_height = frame_->height();
2905#endif
Steve Block6ded16b2010-05-10 14:33:55 +01002906 Comment cmnt(masm_, "[ SharedFunctionInfoLiteral");
2907 InstantiateFunction(node->shared_function_info());
2908 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00002909}
2910
2911
2912void CodeGenerator::VisitConditional(Conditional* node) {
2913#ifdef DEBUG
2914 int original_height = frame_->height();
2915#endif
Steve Block6ded16b2010-05-10 14:33:55 +01002916 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00002917 Comment cmnt(masm_, "[ Conditional");
2918 JumpTarget then;
2919 JumpTarget else_;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002920 LoadCondition(node->condition(), &then, &else_, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00002921 if (has_valid_frame()) {
2922 Branch(false, &else_);
2923 }
2924 if (has_valid_frame() || then.is_linked()) {
2925 then.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002926 Load(node->then_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00002927 }
2928 if (else_.is_linked()) {
2929 JumpTarget exit;
2930 if (has_valid_frame()) exit.Jump();
2931 else_.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002932 Load(node->else_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00002933 if (exit.is_linked()) exit.Bind();
2934 }
Steve Block6ded16b2010-05-10 14:33:55 +01002935 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00002936}
2937
2938
2939void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002940 if (slot->type() == Slot::LOOKUP) {
2941 ASSERT(slot->var()->is_dynamic());
2942
Steve Block6ded16b2010-05-10 14:33:55 +01002943 // JumpTargets do not yet support merging frames so the frame must be
2944 // spilled when jumping to these targets.
Steve Blocka7e24c12009-10-30 11:49:00 +00002945 JumpTarget slow;
2946 JumpTarget done;
2947
Kristian Monsen25f61362010-05-21 11:50:48 +01002948 // Generate fast case for loading from slots that correspond to
2949 // local/global variables or arguments unless they are shadowed by
2950 // eval-introduced bindings.
2951 EmitDynamicLoadFromSlotFastCase(slot,
2952 typeof_state,
2953 &slow,
2954 &done);
Steve Blocka7e24c12009-10-30 11:49:00 +00002955
2956 slow.Bind();
Steve Block6ded16b2010-05-10 14:33:55 +01002957 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00002958 frame_->EmitPush(cp);
2959 __ mov(r0, Operand(slot->var()->name()));
2960 frame_->EmitPush(r0);
2961
2962 if (typeof_state == INSIDE_TYPEOF) {
2963 frame_->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
2964 } else {
2965 frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
2966 }
2967
2968 done.Bind();
2969 frame_->EmitPush(r0);
2970
2971 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01002972 Register scratch = VirtualFrame::scratch0();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002973 TypeInfo info = type_info(slot);
2974 frame_->EmitPush(SlotOperand(slot, scratch), info);
Steve Blocka7e24c12009-10-30 11:49:00 +00002975 if (slot->var()->mode() == Variable::CONST) {
2976 // Const slots may contain 'the hole' value (the constant hasn't been
2977 // initialized yet) which needs to be converted into the 'undefined'
2978 // value.
2979 Comment cmnt(masm_, "[ Unhole const");
Steve Block6ded16b2010-05-10 14:33:55 +01002980 frame_->EmitPop(scratch);
Steve Blocka7e24c12009-10-30 11:49:00 +00002981 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01002982 __ cmp(scratch, ip);
2983 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex, eq);
2984 frame_->EmitPush(scratch);
Steve Blocka7e24c12009-10-30 11:49:00 +00002985 }
2986 }
2987}
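
// Illustrative sketch, not part of the V8 sources: the "Unhole const" block
// above maps the uninitialized-const sentinel to undefined before the value
// becomes visible.  The enum values below are stand-ins for the real heap
// roots (kTheHoleValueRootIndex / kUndefinedValueRootIndex).
enum SketchConstValue { kSketchTheHole, kSketchUndefined, kSketchOther };

static SketchConstValue SketchUnholeConst(SketchConstValue loaded) {
  // A const slot still holds 'the hole' until its initializer has run;
  // readers must observe 'undefined' instead.
  return (loaded == kSketchTheHole) ? kSketchUndefined : loaded;
}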
2988
2989
Steve Block6ded16b2010-05-10 14:33:55 +01002990void CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot,
2991 TypeofState state) {
2992 LoadFromSlot(slot, state);
2993
2994 // Bail out quickly if we're not using lazy arguments allocation.
2995 if (ArgumentsMode() != LAZY_ARGUMENTS_ALLOCATION) return;
2996
2997 // ... or if the slot isn't a non-parameter arguments slot.
2998 if (slot->type() == Slot::PARAMETER || !slot->is_arguments()) return;
2999
3000 VirtualFrame::SpilledScope spilled_scope(frame_);
3001
3002 // Load the loaded value from the stack into r0 but leave it on the
3003 // stack.
3004 __ ldr(r0, MemOperand(sp, 0));
3005
3006 // If the loaded value is the sentinel that indicates that we
3007 // haven't loaded the arguments object yet, we need to do it now.
3008 JumpTarget exit;
3009 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
3010 __ cmp(r0, ip);
3011 exit.Branch(ne);
3012 frame_->Drop();
3013 StoreArgumentsObject(false);
3014 exit.Bind();
3015}
3016
3017
Leon Clarkee46be812010-01-19 14:06:41 +00003018void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
3019 ASSERT(slot != NULL);
3020 if (slot->type() == Slot::LOOKUP) {
Steve Block6ded16b2010-05-10 14:33:55 +01003021 VirtualFrame::SpilledScope spilled_scope(frame_);
Leon Clarkee46be812010-01-19 14:06:41 +00003022 ASSERT(slot->var()->is_dynamic());
3023
3024 // For now, just do a runtime call.
3025 frame_->EmitPush(cp);
3026 __ mov(r0, Operand(slot->var()->name()));
3027 frame_->EmitPush(r0);
3028
3029 if (init_state == CONST_INIT) {
3030 // Same as the case for a normal store, but ignores attribute
3031 // (e.g. READ_ONLY) of context slot so that we can initialize
3032 // const properties (introduced via eval("const foo = (some
3033 // expr);")). Also, uses the current function context instead of
3034 // the top context.
3035 //
3036 // Note that we must declare the foo upon entry of eval(), via a
3037 // context slot declaration, but we cannot initialize it at the
3038 // same time, because the const declaration may be at the end of
3039 // the eval code (sigh...) and the const variable may have been
3040 // used before (where its value is 'undefined'). Thus, we can only
3041 // do the initialization when we actually encounter the expression
3042 // and when the expression operands are defined and valid, and
3043 // thus we need the split into 2 operations: declaration of the
3044 // context slot followed by initialization.
3045 frame_->CallRuntime(Runtime::kInitializeConstContextSlot, 3);
3046 } else {
3047 frame_->CallRuntime(Runtime::kStoreContextSlot, 3);
3048 }
3049 // Storing a variable must keep the (new) value on the expression
3050 // stack. This is necessary for compiling assignment expressions.
3051 frame_->EmitPush(r0);
3052
3053 } else {
3054 ASSERT(!slot->var()->is_dynamic());
Steve Block6ded16b2010-05-10 14:33:55 +01003055 Register scratch = VirtualFrame::scratch0();
3056 VirtualFrame::RegisterAllocationScope scope(this);
Leon Clarkee46be812010-01-19 14:06:41 +00003057
Steve Block6ded16b2010-05-10 14:33:55 +01003058 // The frame must be spilled when branching to this target.
Leon Clarkee46be812010-01-19 14:06:41 +00003059 JumpTarget exit;
Steve Block6ded16b2010-05-10 14:33:55 +01003060
Leon Clarkee46be812010-01-19 14:06:41 +00003061 if (init_state == CONST_INIT) {
3062 ASSERT(slot->var()->mode() == Variable::CONST);
3063 // Only the first const initialization must be executed (the slot
3064 // still contains 'the hole' value). When the assignment is
3065 // executed, the code is identical to a normal store (see below).
3066 Comment cmnt(masm_, "[ Init const");
Steve Block6ded16b2010-05-10 14:33:55 +01003067 __ ldr(scratch, SlotOperand(slot, scratch));
Leon Clarkee46be812010-01-19 14:06:41 +00003068 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01003069 __ cmp(scratch, ip);
3070 frame_->SpillAll();
Leon Clarkee46be812010-01-19 14:06:41 +00003071 exit.Branch(ne);
3072 }
3073
3074 // We must execute the store. Storing a variable must keep the
3075 // (new) value on the stack. This is necessary for compiling
3076 // assignment expressions.
3077 //
3078 // Note: We will reach here even with slot->var()->mode() ==
3079 // Variable::CONST because of const declarations which will
3080 // initialize consts to 'the hole' value and by doing so, end up
3081 // calling this code. r2 may be loaded with context; used below in
3082    // calling this code. scratch may hold the context; it is used below in RecordWrite.
Steve Block6ded16b2010-05-10 14:33:55 +01003083 Register tos = frame_->Peek();
3084 __ str(tos, SlotOperand(slot, scratch));
Leon Clarkee46be812010-01-19 14:06:41 +00003085 if (slot->type() == Slot::CONTEXT) {
3086 // Skip write barrier if the written value is a smi.
Steve Block6ded16b2010-05-10 14:33:55 +01003087 __ tst(tos, Operand(kSmiTagMask));
3088 // We don't use tos any more after here.
3089 VirtualFrame::SpilledScope spilled_scope(frame_);
Leon Clarkee46be812010-01-19 14:06:41 +00003090 exit.Branch(eq);
Steve Block6ded16b2010-05-10 14:33:55 +01003091 // scratch is loaded with context when calling SlotOperand above.
Leon Clarkee46be812010-01-19 14:06:41 +00003092 int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
3093 __ mov(r3, Operand(offset));
Steve Block6ded16b2010-05-10 14:33:55 +01003094 // r1 could be identical with tos, but that doesn't matter.
3095 __ RecordWrite(scratch, r3, r1);
Leon Clarkee46be812010-01-19 14:06:41 +00003096 }
3097 // If we definitely did not jump over the assignment, we do not need
3098 // to bind the exit label. Doing so can defeat peephole
3099 // optimization.
3100 if (init_state == CONST_INIT || slot->type() == Slot::CONTEXT) {
Steve Block6ded16b2010-05-10 14:33:55 +01003101 frame_->SpillAll();
Leon Clarkee46be812010-01-19 14:06:41 +00003102 exit.Bind();
3103 }
3104 }
3105}
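
// Illustrative sketch, not part of the V8 sources: the write-barrier skip in
// StoreToSlot relies on 32-bit smi tagging, where a small integer is encoded
// with a clear low tag bit.  The helper below shows the same test (tst with
// kSmiTagMask, branch on eq) in portable C++; the mask value is an assumption
// stated only for illustration.
static inline bool SketchIsSmi(unsigned tagged_word) {
  const unsigned kSketchSmiTagMask = 1;
  // A clear low bit means the stored value is a smi, so the store cannot
  // create a pointer that the garbage collector must track and no
  // RecordWrite is needed.
  return (tagged_word & kSketchSmiTagMask) == 0;
}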
3106
3107
Steve Blocka7e24c12009-10-30 11:49:00 +00003108void CodeGenerator::LoadFromGlobalSlotCheckExtensions(Slot* slot,
3109 TypeofState typeof_state,
Steve Blocka7e24c12009-10-30 11:49:00 +00003110 JumpTarget* slow) {
3111 // Check that no extension objects have been created by calls to
3112 // eval from the current scope to the global scope.
Steve Block6ded16b2010-05-10 14:33:55 +01003113 Register tmp = frame_->scratch0();
3114 Register tmp2 = frame_->scratch1();
Steve Blocka7e24c12009-10-30 11:49:00 +00003115 Register context = cp;
3116 Scope* s = scope();
3117 while (s != NULL) {
3118 if (s->num_heap_slots() > 0) {
3119 if (s->calls_eval()) {
Steve Block6ded16b2010-05-10 14:33:55 +01003120 frame_->SpillAll();
Steve Blocka7e24c12009-10-30 11:49:00 +00003121 // Check that extension is NULL.
3122 __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX));
3123 __ tst(tmp2, tmp2);
3124 slow->Branch(ne);
3125 }
3126 // Load next context in chain.
3127 __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
3128 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
3129 context = tmp;
3130 }
3131 // If no outer scope calls eval, we do not need to check more
3132 // context extensions.
3133 if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
3134 s = s->outer_scope();
3135 }
3136
3137 if (s->is_eval_scope()) {
Steve Block6ded16b2010-05-10 14:33:55 +01003138 frame_->SpillAll();
Steve Blocka7e24c12009-10-30 11:49:00 +00003139 Label next, fast;
Steve Block6ded16b2010-05-10 14:33:55 +01003140 __ Move(tmp, context);
Steve Blocka7e24c12009-10-30 11:49:00 +00003141 __ bind(&next);
3142 // Terminate at global context.
3143 __ ldr(tmp2, FieldMemOperand(tmp, HeapObject::kMapOffset));
3144 __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
3145 __ cmp(tmp2, ip);
3146 __ b(eq, &fast);
3147 // Check that extension is NULL.
3148 __ ldr(tmp2, ContextOperand(tmp, Context::EXTENSION_INDEX));
3149 __ tst(tmp2, tmp2);
3150 slow->Branch(ne);
3151 // Load next context in chain.
3152 __ ldr(tmp, ContextOperand(tmp, Context::CLOSURE_INDEX));
3153 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
3154 __ b(&next);
3155 __ bind(&fast);
3156 }
3157
Steve Blocka7e24c12009-10-30 11:49:00 +00003158 // Load the global object.
3159 LoadGlobal();
Steve Block6ded16b2010-05-10 14:33:55 +01003160  // Set up the name register and call the load IC.
3161 frame_->CallLoadIC(slot->var()->name(),
3162 typeof_state == INSIDE_TYPEOF
3163 ? RelocInfo::CODE_TARGET
3164 : RelocInfo::CODE_TARGET_CONTEXT);
Steve Blocka7e24c12009-10-30 11:49:00 +00003165}
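
// Illustrative sketch, not part of the V8 sources: the loop above walks the
// context chain and falls back to the slow path as soon as it finds a context
// whose extension object is set (i.e. eval may have introduced bindings).
// The list walk below mirrors that control flow with hypothetical types.
struct SketchContext {
  SketchContext* previous;  // Enclosing (closure) context.
  void* extension;          // Non-null when eval introduced new bindings.
};

static bool SketchGlobalLoadIsSafe(SketchContext* current) {
  for (SketchContext* c = current; c != NULL; c = c->previous) {
    if (c->extension != NULL) return false;  // Take the slow path.
  }
  return true;  // No extensions: the global slot can be loaded directly.
}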
3166
3167
Kristian Monsen25f61362010-05-21 11:50:48 +01003168void CodeGenerator::EmitDynamicLoadFromSlotFastCase(Slot* slot,
3169 TypeofState typeof_state,
3170 JumpTarget* slow,
3171 JumpTarget* done) {
3172 // Generate fast-case code for variables that might be shadowed by
3173 // eval-introduced variables. Eval is used a lot without
3174 // introducing variables. In those cases, we do not want to
3175 // perform a runtime call for all variables in the scope
3176 // containing the eval.
3177 if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
3178 LoadFromGlobalSlotCheckExtensions(slot, typeof_state, slow);
3179 frame_->SpillAll();
3180 done->Jump();
3181
3182 } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
3183 frame_->SpillAll();
3184 Slot* potential_slot = slot->var()->local_if_not_shadowed()->slot();
3185 Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
3186 if (potential_slot != NULL) {
3187 // Generate fast case for locals that rewrite to slots.
3188 __ ldr(r0,
3189 ContextSlotOperandCheckExtensions(potential_slot,
3190 r1,
3191 r2,
3192 slow));
3193 if (potential_slot->var()->mode() == Variable::CONST) {
3194 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
3195 __ cmp(r0, ip);
3196 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
3197 }
3198 done->Jump();
3199 } else if (rewrite != NULL) {
3200 // Generate fast case for argument loads.
3201 Property* property = rewrite->AsProperty();
3202 if (property != NULL) {
3203 VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
3204 Literal* key_literal = property->key()->AsLiteral();
3205 if (obj_proxy != NULL &&
3206 key_literal != NULL &&
3207 obj_proxy->IsArguments() &&
3208 key_literal->handle()->IsSmi()) {
3209 // Load arguments object if there are no eval-introduced
3210 // variables. Then load the argument from the arguments
3211 // object using keyed load.
3212 __ ldr(r0,
3213 ContextSlotOperandCheckExtensions(obj_proxy->var()->slot(),
3214 r1,
3215 r2,
3216 slow));
3217 frame_->EmitPush(r0);
3218 __ mov(r1, Operand(key_literal->handle()));
3219 frame_->EmitPush(r1);
3220 EmitKeyedLoad();
3221 done->Jump();
3222 }
3223 }
3224 }
3225 }
3226}
3227
3228
Steve Blocka7e24c12009-10-30 11:49:00 +00003229void CodeGenerator::VisitSlot(Slot* node) {
3230#ifdef DEBUG
3231 int original_height = frame_->height();
3232#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003233 Comment cmnt(masm_, "[ Slot");
Steve Block6ded16b2010-05-10 14:33:55 +01003234 LoadFromSlotCheckForArguments(node, NOT_INSIDE_TYPEOF);
3235 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003236}
3237
3238
3239void CodeGenerator::VisitVariableProxy(VariableProxy* node) {
3240#ifdef DEBUG
3241 int original_height = frame_->height();
3242#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003243 Comment cmnt(masm_, "[ VariableProxy");
3244
3245 Variable* var = node->var();
3246 Expression* expr = var->rewrite();
3247 if (expr != NULL) {
3248 Visit(expr);
3249 } else {
3250 ASSERT(var->is_global());
3251 Reference ref(this, node);
Steve Block6ded16b2010-05-10 14:33:55 +01003252 ref.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00003253 }
Steve Block6ded16b2010-05-10 14:33:55 +01003254 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003255}
3256
3257
3258void CodeGenerator::VisitLiteral(Literal* node) {
3259#ifdef DEBUG
3260 int original_height = frame_->height();
3261#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003262 Comment cmnt(masm_, "[ Literal");
Steve Block6ded16b2010-05-10 14:33:55 +01003263 Register reg = frame_->GetTOSRegister();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003264 bool is_smi = node->handle()->IsSmi();
Steve Block6ded16b2010-05-10 14:33:55 +01003265 __ mov(reg, Operand(node->handle()));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003266 frame_->EmitPush(reg, is_smi ? TypeInfo::Smi() : TypeInfo::Unknown());
Steve Block6ded16b2010-05-10 14:33:55 +01003267 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003268}
3269
3270
3271void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
3272#ifdef DEBUG
3273 int original_height = frame_->height();
3274#endif
Steve Block6ded16b2010-05-10 14:33:55 +01003275 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00003276  Comment cmnt(masm_, "[ RegExp Literal");
3277
3278 // Retrieve the literal array and check the allocated entry.
3279
3280 // Load the function of this activation.
3281 __ ldr(r1, frame_->Function());
3282
3283 // Load the literals array of the function.
3284 __ ldr(r1, FieldMemOperand(r1, JSFunction::kLiteralsOffset));
3285
3286 // Load the literal at the ast saved index.
3287 int literal_offset =
3288 FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
3289 __ ldr(r2, FieldMemOperand(r1, literal_offset));
3290
3291 JumpTarget done;
3292 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
3293 __ cmp(r2, ip);
3294 done.Branch(ne);
3295
3296  // If the entry is undefined, we call the runtime system to compute
3297  // the literal.
3298 frame_->EmitPush(r1); // literal array (0)
3299 __ mov(r0, Operand(Smi::FromInt(node->literal_index())));
3300 frame_->EmitPush(r0); // literal index (1)
3301 __ mov(r0, Operand(node->pattern())); // RegExp pattern (2)
3302 frame_->EmitPush(r0);
3303 __ mov(r0, Operand(node->flags())); // RegExp flags (3)
3304 frame_->EmitPush(r0);
3305 frame_->CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
3306 __ mov(r2, Operand(r0));
3307
3308 done.Bind();
3309 // Push the literal.
3310 frame_->EmitPush(r2);
Steve Block6ded16b2010-05-10 14:33:55 +01003311 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003312}
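
// Illustrative sketch, not part of the V8 sources: the undefined check above
// is a one-slot memoization of the materialized regexp object.  The helper
// below shows the same pattern; SketchMaterialize stands in for the
// Runtime::kMaterializeRegExpLiteral call and all names are hypothetical.
struct SketchRegExp { const char* pattern; const char* flags; };

static SketchRegExp* SketchMaterialize(const char* pattern,
                                       const char* flags) {
  SketchRegExp* re = new SketchRegExp;
  re->pattern = pattern;
  re->flags = flags;
  return re;
}

static SketchRegExp* SketchGetRegExpLiteral(SketchRegExp** literal_slot,
                                            const char* pattern,
                                            const char* flags) {
  if (*literal_slot == NULL) {  // Slot still holds "undefined".
    *literal_slot = SketchMaterialize(pattern, flags);
  }
  return *literal_slot;  // Reused on every later evaluation of the literal.
}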
3313
3314
Steve Blocka7e24c12009-10-30 11:49:00 +00003315void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
3316#ifdef DEBUG
3317 int original_height = frame_->height();
3318#endif
Steve Block6ded16b2010-05-10 14:33:55 +01003319 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00003320 Comment cmnt(masm_, "[ ObjectLiteral");
3321
Steve Blocka7e24c12009-10-30 11:49:00 +00003322 // Load the function of this activation.
Steve Block6ded16b2010-05-10 14:33:55 +01003323 __ ldr(r3, frame_->Function());
Leon Clarkee46be812010-01-19 14:06:41 +00003324 // Literal array.
Steve Block6ded16b2010-05-10 14:33:55 +01003325 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00003326 // Literal index.
Steve Block6ded16b2010-05-10 14:33:55 +01003327 __ mov(r2, Operand(Smi::FromInt(node->literal_index())));
Leon Clarkee46be812010-01-19 14:06:41 +00003328 // Constant properties.
Steve Block6ded16b2010-05-10 14:33:55 +01003329 __ mov(r1, Operand(node->constant_properties()));
3330 // Should the object literal have fast elements?
3331 __ mov(r0, Operand(Smi::FromInt(node->fast_elements() ? 1 : 0)));
3332 frame_->EmitPushMultiple(4, r3.bit() | r2.bit() | r1.bit() | r0.bit());
Leon Clarkee46be812010-01-19 14:06:41 +00003333 if (node->depth() > 1) {
Steve Block6ded16b2010-05-10 14:33:55 +01003334 frame_->CallRuntime(Runtime::kCreateObjectLiteral, 4);
Leon Clarkee46be812010-01-19 14:06:41 +00003335 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01003336 frame_->CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
Steve Blocka7e24c12009-10-30 11:49:00 +00003337 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003338 frame_->EmitPush(r0); // save the result
Steve Blocka7e24c12009-10-30 11:49:00 +00003339 for (int i = 0; i < node->properties()->length(); i++) {
Andrei Popescu402d9372010-02-26 13:31:12 +00003340 // At the start of each iteration, the top of stack contains
3341 // the newly created object literal.
Steve Blocka7e24c12009-10-30 11:49:00 +00003342 ObjectLiteral::Property* property = node->properties()->at(i);
3343 Literal* key = property->key();
3344 Expression* value = property->value();
3345 switch (property->kind()) {
3346 case ObjectLiteral::Property::CONSTANT:
3347 break;
3348 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
3349 if (CompileTimeValue::IsCompileTimeValue(property->value())) break;
3350 // else fall through
Andrei Popescu402d9372010-02-26 13:31:12 +00003351 case ObjectLiteral::Property::COMPUTED:
3352 if (key->handle()->IsSymbol()) {
3353 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003354 Load(value);
Andrei Popescu402d9372010-02-26 13:31:12 +00003355 frame_->EmitPop(r0);
3356 __ mov(r2, Operand(key->handle()));
3357 __ ldr(r1, frame_->Top()); // Load the receiver.
3358 frame_->CallCodeObject(ic, RelocInfo::CODE_TARGET, 0);
3359 break;
3360 }
3361 // else fall through
Steve Blocka7e24c12009-10-30 11:49:00 +00003362 case ObjectLiteral::Property::PROTOTYPE: {
Andrei Popescu402d9372010-02-26 13:31:12 +00003363 __ ldr(r0, frame_->Top());
Steve Blocka7e24c12009-10-30 11:49:00 +00003364 frame_->EmitPush(r0); // dup the result
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003365 Load(key);
3366 Load(value);
Steve Blocka7e24c12009-10-30 11:49:00 +00003367 frame_->CallRuntime(Runtime::kSetProperty, 3);
Steve Blocka7e24c12009-10-30 11:49:00 +00003368 break;
3369 }
3370 case ObjectLiteral::Property::SETTER: {
Andrei Popescu402d9372010-02-26 13:31:12 +00003371 __ ldr(r0, frame_->Top());
Steve Blocka7e24c12009-10-30 11:49:00 +00003372 frame_->EmitPush(r0);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003373 Load(key);
Steve Blocka7e24c12009-10-30 11:49:00 +00003374 __ mov(r0, Operand(Smi::FromInt(1)));
3375 frame_->EmitPush(r0);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003376 Load(value);
Steve Blocka7e24c12009-10-30 11:49:00 +00003377 frame_->CallRuntime(Runtime::kDefineAccessor, 4);
Steve Blocka7e24c12009-10-30 11:49:00 +00003378 break;
3379 }
3380 case ObjectLiteral::Property::GETTER: {
Andrei Popescu402d9372010-02-26 13:31:12 +00003381 __ ldr(r0, frame_->Top());
Steve Blocka7e24c12009-10-30 11:49:00 +00003382 frame_->EmitPush(r0);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003383 Load(key);
Steve Blocka7e24c12009-10-30 11:49:00 +00003384 __ mov(r0, Operand(Smi::FromInt(0)));
3385 frame_->EmitPush(r0);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003386 Load(value);
Steve Blocka7e24c12009-10-30 11:49:00 +00003387 frame_->CallRuntime(Runtime::kDefineAccessor, 4);
Steve Blocka7e24c12009-10-30 11:49:00 +00003388 break;
3389 }
3390 }
3391 }
Steve Block6ded16b2010-05-10 14:33:55 +01003392 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003393}
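
// Illustrative sketch, not part of the V8 sources: each object literal
// property that is not already part of the boilerplate is installed through
// one of three mechanisms, chosen roughly as below.  The names are
// hypothetical; only the ordering of the tests mirrors the switch above.
enum SketchInstallMethod {
  kSketchInBoilerplate,          // CONSTANT / simple materialized literals.
  kSketchNamedStoreIC,           // COMPUTED property with a symbol key.
  kSketchRuntimeSetProperty,     // Other computed keys and __proto__.
  kSketchRuntimeDefineAccessor   // Getters and setters.
};

static SketchInstallMethod SketchClassifyObjectLiteralProperty(
    bool is_compile_time_value, bool is_accessor,
    bool is_prototype, bool key_is_symbol) {
  if (is_compile_time_value) return kSketchInBoilerplate;
  if (is_accessor) return kSketchRuntimeDefineAccessor;
  if (key_is_symbol && !is_prototype) return kSketchNamedStoreIC;
  return kSketchRuntimeSetProperty;
}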
3394
3395
Steve Blocka7e24c12009-10-30 11:49:00 +00003396void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
3397#ifdef DEBUG
3398 int original_height = frame_->height();
3399#endif
Steve Block6ded16b2010-05-10 14:33:55 +01003400 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00003401 Comment cmnt(masm_, "[ ArrayLiteral");
3402
Steve Blocka7e24c12009-10-30 11:49:00 +00003403 // Load the function of this activation.
Leon Clarkee46be812010-01-19 14:06:41 +00003404 __ ldr(r2, frame_->Function());
Andrei Popescu402d9372010-02-26 13:31:12 +00003405 // Load the literals array of the function.
Leon Clarkee46be812010-01-19 14:06:41 +00003406 __ ldr(r2, FieldMemOperand(r2, JSFunction::kLiteralsOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00003407 __ mov(r1, Operand(Smi::FromInt(node->literal_index())));
Leon Clarkee46be812010-01-19 14:06:41 +00003408 __ mov(r0, Operand(node->constant_elements()));
3409 frame_->EmitPushMultiple(3, r2.bit() | r1.bit() | r0.bit());
Andrei Popescu402d9372010-02-26 13:31:12 +00003410 int length = node->values()->length();
Leon Clarkee46be812010-01-19 14:06:41 +00003411 if (node->depth() > 1) {
3412 frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
Andrei Popescu402d9372010-02-26 13:31:12 +00003413 } else if (length > FastCloneShallowArrayStub::kMaximumLength) {
Leon Clarkee46be812010-01-19 14:06:41 +00003414 frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
Andrei Popescu402d9372010-02-26 13:31:12 +00003415 } else {
3416 FastCloneShallowArrayStub stub(length);
3417 frame_->CallStub(&stub, 3);
Steve Blocka7e24c12009-10-30 11:49:00 +00003418 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003419 frame_->EmitPush(r0); // save the result
Leon Clarkee46be812010-01-19 14:06:41 +00003420  // r0: created array literal
Steve Blocka7e24c12009-10-30 11:49:00 +00003421
3422 // Generate code to set the elements in the array that are not
3423 // literals.
3424 for (int i = 0; i < node->values()->length(); i++) {
3425 Expression* value = node->values()->at(i);
3426
3427 // If value is a literal the property value is already set in the
3428 // boilerplate object.
3429 if (value->AsLiteral() != NULL) continue;
3430 // If value is a materialized literal the property value is already set
3431 // in the boilerplate object if it is simple.
3432 if (CompileTimeValue::IsCompileTimeValue(value)) continue;
3433
3434 // The property must be set by generated code.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003435 Load(value);
Steve Blocka7e24c12009-10-30 11:49:00 +00003436 frame_->EmitPop(r0);
3437
3438 // Fetch the object literal.
3439 __ ldr(r1, frame_->Top());
3440 // Get the elements array.
3441 __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
3442
3443 // Write to the indexed properties array.
3444 int offset = i * kPointerSize + FixedArray::kHeaderSize;
3445 __ str(r0, FieldMemOperand(r1, offset));
3446
3447 // Update the write barrier for the array address.
3448 __ mov(r3, Operand(offset));
3449 __ RecordWrite(r1, r3, r2);
3450 }
Steve Block6ded16b2010-05-10 14:33:55 +01003451 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003452}
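
// Illustrative sketch, not part of the V8 sources: the element store above
// addresses slot i of the elements FixedArray as a byte offset past the
// array header.  The constants below are illustrative values for a 32-bit
// target, not authoritative V8 layout definitions.
static int SketchArrayElementOffset(int index) {
  const int kSketchPointerSize = 4;           // 32-bit ARM word size.
  const int kSketchFixedArrayHeaderSize = 8;  // Map pointer + length field.
  return kSketchFixedArrayHeaderSize + index * kSketchPointerSize;
}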
3453
3454
3455void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* node) {
3456#ifdef DEBUG
3457 int original_height = frame_->height();
3458#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003459 // Call runtime routine to allocate the catch extension object and
3460 // assign the exception value to the catch variable.
3461 Comment cmnt(masm_, "[ CatchExtensionObject");
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003462 Load(node->key());
3463 Load(node->value());
Steve Blocka7e24c12009-10-30 11:49:00 +00003464 frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2);
3465 frame_->EmitPush(r0);
Steve Block6ded16b2010-05-10 14:33:55 +01003466 ASSERT_EQ(original_height + 1, frame_->height());
3467}
3468
3469
3470void CodeGenerator::EmitSlotAssignment(Assignment* node) {
3471#ifdef DEBUG
3472 int original_height = frame_->height();
3473#endif
3474 Comment cmnt(masm(), "[ Variable Assignment");
3475 Variable* var = node->target()->AsVariableProxy()->AsVariable();
3476 ASSERT(var != NULL);
3477 Slot* slot = var->slot();
3478 ASSERT(slot != NULL);
3479
3480 // Evaluate the right-hand side.
3481 if (node->is_compound()) {
3482 // For a compound assignment the right-hand side is a binary operation
3483 // between the current property value and the actual right-hand side.
3484 LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
3485
3486 // Perform the binary operation.
3487 Literal* literal = node->value()->AsLiteral();
3488 bool overwrite_value =
3489 (node->value()->AsBinaryOperation() != NULL &&
3490 node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
3491 if (literal != NULL && literal->handle()->IsSmi()) {
3492 SmiOperation(node->binary_op(),
3493 literal->handle(),
3494 false,
3495 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
3496 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003497 GenerateInlineSmi inline_smi =
3498 loop_nesting() > 0 ? GENERATE_INLINE_SMI : DONT_GENERATE_INLINE_SMI;
3499 if (literal != NULL) {
3500 ASSERT(!literal->handle()->IsSmi());
3501 inline_smi = DONT_GENERATE_INLINE_SMI;
3502 }
Steve Block6ded16b2010-05-10 14:33:55 +01003503 Load(node->value());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003504 GenericBinaryOperation(node->binary_op(),
3505 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE,
3506 inline_smi);
Steve Block6ded16b2010-05-10 14:33:55 +01003507 }
3508 } else {
3509 Load(node->value());
3510 }
3511
3512 // Perform the assignment.
3513 if (var->mode() != Variable::CONST || node->op() == Token::INIT_CONST) {
3514 CodeForSourcePosition(node->position());
3515 StoreToSlot(slot,
3516 node->op() == Token::INIT_CONST ? CONST_INIT : NOT_CONST_INIT);
3517 }
3518 ASSERT_EQ(original_height + 1, frame_->height());
3519}
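
// Illustrative sketch, not part of the V8 sources: the compound-assignment
// path above is the usual load/combine/store expansion.  The helper below
// spells it out for plain integer slots; SketchApplyBinaryOp stands in for
// SmiOperation/GenericBinaryOperation and all names are hypothetical.
static int SketchApplyBinaryOp(int lhs, int rhs) { return lhs + rhs; }  // '+='

static void SketchCompoundAssignToSlot(int* slot, int rhs, bool is_compound) {
  int value = rhs;
  if (is_compound) {
    value = SketchApplyBinaryOp(*slot, rhs);  // Load current value, combine.
  }
  *slot = value;  // Store; the value also stays on top of the stack.
}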
3520
3521
3522void CodeGenerator::EmitNamedPropertyAssignment(Assignment* node) {
3523#ifdef DEBUG
3524 int original_height = frame_->height();
3525#endif
3526 Comment cmnt(masm(), "[ Named Property Assignment");
3527 Variable* var = node->target()->AsVariableProxy()->AsVariable();
3528 Property* prop = node->target()->AsProperty();
3529 ASSERT(var == NULL || (prop == NULL && var->is_global()));
3530
3531 // Initialize name and evaluate the receiver sub-expression if necessary. If
3532 // the receiver is trivial it is not placed on the stack at this point, but
3533 // loaded whenever actually needed.
3534 Handle<String> name;
3535 bool is_trivial_receiver = false;
3536 if (var != NULL) {
3537 name = var->name();
3538 } else {
3539 Literal* lit = prop->key()->AsLiteral();
3540 ASSERT_NOT_NULL(lit);
3541 name = Handle<String>::cast(lit->handle());
3542 // Do not materialize the receiver on the frame if it is trivial.
3543 is_trivial_receiver = prop->obj()->IsTrivial();
3544 if (!is_trivial_receiver) Load(prop->obj());
3545 }
3546
3547 // Change to slow case in the beginning of an initialization block to
3548 // avoid the quadratic behavior of repeatedly adding fast properties.
3549 if (node->starts_initialization_block()) {
3550    // An initialization block consists of assignments of the form expr.x = ...,
3551    // so this will never be an assignment to a variable and there must be a
3552    // receiver object.
3553 ASSERT_EQ(NULL, var);
3554 if (is_trivial_receiver) {
3555 Load(prop->obj());
3556 } else {
3557 frame_->Dup();
3558 }
3559 frame_->CallRuntime(Runtime::kToSlowProperties, 1);
3560 }
3561
3562 // Change to fast case at the end of an initialization block. To prepare for
3563 // that add an extra copy of the receiver to the frame, so that it can be
3564 // converted back to fast case after the assignment.
3565 if (node->ends_initialization_block() && !is_trivial_receiver) {
3566 frame_->Dup();
3567 }
3568
3569 // Stack layout:
3570 // [tos] : receiver (only materialized if non-trivial)
3571 // [tos+1] : receiver if at the end of an initialization block
3572
3573 // Evaluate the right-hand side.
3574 if (node->is_compound()) {
3575 // For a compound assignment the right-hand side is a binary operation
3576 // between the current property value and the actual right-hand side.
3577 if (is_trivial_receiver) {
3578 Load(prop->obj());
3579 } else if (var != NULL) {
3580 LoadGlobal();
3581 } else {
3582 frame_->Dup();
3583 }
3584 EmitNamedLoad(name, var != NULL);
Steve Block6ded16b2010-05-10 14:33:55 +01003585
3586 // Perform the binary operation.
3587 Literal* literal = node->value()->AsLiteral();
3588 bool overwrite_value =
3589 (node->value()->AsBinaryOperation() != NULL &&
3590 node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
3591 if (literal != NULL && literal->handle()->IsSmi()) {
3592 SmiOperation(node->binary_op(),
3593 literal->handle(),
3594 false,
3595 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
3596 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003597 GenerateInlineSmi inline_smi =
3598 loop_nesting() > 0 ? GENERATE_INLINE_SMI : DONT_GENERATE_INLINE_SMI;
3599 if (literal != NULL) {
3600 ASSERT(!literal->handle()->IsSmi());
3601 inline_smi = DONT_GENERATE_INLINE_SMI;
3602 }
Steve Block6ded16b2010-05-10 14:33:55 +01003603 Load(node->value());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003604 GenericBinaryOperation(node->binary_op(),
3605 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE,
3606 inline_smi);
Steve Block6ded16b2010-05-10 14:33:55 +01003607 }
3608 } else {
3609 // For non-compound assignment just load the right-hand side.
3610 Load(node->value());
3611 }
3612
3613 // Stack layout:
3614 // [tos] : value
3615 // [tos+1] : receiver (only materialized if non-trivial)
3616 // [tos+2] : receiver if at the end of an initialization block
3617
3618 // Perform the assignment. It is safe to ignore constants here.
3619 ASSERT(var == NULL || var->mode() != Variable::CONST);
3620 ASSERT_NE(Token::INIT_CONST, node->op());
3621 if (is_trivial_receiver) {
3622 // Load the receiver and swap with the value.
3623 Load(prop->obj());
3624 Register t0 = frame_->PopToRegister();
3625 Register t1 = frame_->PopToRegister(t0);
3626 frame_->EmitPush(t0);
3627 frame_->EmitPush(t1);
3628 }
3629 CodeForSourcePosition(node->position());
3630 bool is_contextual = (var != NULL);
3631 EmitNamedStore(name, is_contextual);
3632 frame_->EmitPush(r0);
3633
3634 // Change to fast case at the end of an initialization block.
3635 if (node->ends_initialization_block()) {
3636 ASSERT_EQ(NULL, var);
3637 // The argument to the runtime call is the receiver.
3638 if (is_trivial_receiver) {
3639 Load(prop->obj());
3640 } else {
3641 // A copy of the receiver is below the value of the assignment. Swap
3642 // the receiver and the value of the assignment expression.
3643 Register t0 = frame_->PopToRegister();
3644 Register t1 = frame_->PopToRegister(t0);
3645 frame_->EmitPush(t0);
3646 frame_->EmitPush(t1);
3647 }
3648 frame_->CallRuntime(Runtime::kToFastProperties, 1);
3649 }
3650
3651 // Stack layout:
3652 // [tos] : result
3653
3654 ASSERT_EQ(original_height + 1, frame_->height());
3655}
3656
3657
3658void CodeGenerator::EmitKeyedPropertyAssignment(Assignment* node) {
3659#ifdef DEBUG
3660 int original_height = frame_->height();
3661#endif
3662 Comment cmnt(masm_, "[ Keyed Property Assignment");
3663 Property* prop = node->target()->AsProperty();
3664 ASSERT_NOT_NULL(prop);
3665
3666 // Evaluate the receiver subexpression.
3667 Load(prop->obj());
3668
3669 // Change to slow case in the beginning of an initialization block to
3670 // avoid the quadratic behavior of repeatedly adding fast properties.
3671 if (node->starts_initialization_block()) {
3672 frame_->Dup();
3673 frame_->CallRuntime(Runtime::kToSlowProperties, 1);
3674 }
3675
3676 // Change to fast case at the end of an initialization block. To prepare for
3677 // that add an extra copy of the receiver to the frame, so that it can be
3678 // converted back to fast case after the assignment.
3679 if (node->ends_initialization_block()) {
3680 frame_->Dup();
3681 }
3682
3683 // Evaluate the key subexpression.
3684 Load(prop->key());
3685
3686 // Stack layout:
3687 // [tos] : key
3688 // [tos+1] : receiver
3689 // [tos+2] : receiver if at the end of an initialization block
3690
3691 // Evaluate the right-hand side.
3692 if (node->is_compound()) {
3693 // For a compound assignment the right-hand side is a binary operation
3694 // between the current property value and the actual right-hand side.
Kristian Monsen25f61362010-05-21 11:50:48 +01003695 // Duplicate receiver and key for loading the current property value.
3696 frame_->Dup2();
Steve Block6ded16b2010-05-10 14:33:55 +01003697 EmitKeyedLoad();
3698 frame_->EmitPush(r0);
3699
3700 // Perform the binary operation.
3701 Literal* literal = node->value()->AsLiteral();
3702 bool overwrite_value =
3703 (node->value()->AsBinaryOperation() != NULL &&
3704 node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
3705 if (literal != NULL && literal->handle()->IsSmi()) {
3706 SmiOperation(node->binary_op(),
3707 literal->handle(),
3708 false,
3709 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
3710 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003711 GenerateInlineSmi inline_smi =
3712 loop_nesting() > 0 ? GENERATE_INLINE_SMI : DONT_GENERATE_INLINE_SMI;
3713 if (literal != NULL) {
3714 ASSERT(!literal->handle()->IsSmi());
3715 inline_smi = DONT_GENERATE_INLINE_SMI;
3716 }
Steve Block6ded16b2010-05-10 14:33:55 +01003717 Load(node->value());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003718 GenericBinaryOperation(node->binary_op(),
3719 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE,
3720 inline_smi);
Steve Block6ded16b2010-05-10 14:33:55 +01003721 }
3722 } else {
3723 // For non-compound assignment just load the right-hand side.
3724 Load(node->value());
3725 }
3726
3727 // Stack layout:
3728 // [tos] : value
3729 // [tos+1] : key
3730 // [tos+2] : receiver
3731 // [tos+3] : receiver if at the end of an initialization block
3732
3733 // Perform the assignment. It is safe to ignore constants here.
3734 ASSERT(node->op() != Token::INIT_CONST);
3735 CodeForSourcePosition(node->position());
Steve Block6ded16b2010-05-10 14:33:55 +01003736 EmitKeyedStore(prop->key()->type());
Steve Block6ded16b2010-05-10 14:33:55 +01003737 frame_->EmitPush(r0);
3738
3739 // Stack layout:
3740 // [tos] : result
3741 // [tos+1] : receiver if at the end of an initialization block
3742
3743 // Change to fast case at the end of an initialization block.
3744 if (node->ends_initialization_block()) {
3745 // The argument to the runtime call is the extra copy of the receiver,
3746 // which is below the value of the assignment. Swap the receiver and
3747 // the value of the assignment expression.
3748 Register t0 = frame_->PopToRegister();
3749 Register t1 = frame_->PopToRegister(t0);
3750 frame_->EmitPush(t1);
3751 frame_->EmitPush(t0);
3752 frame_->CallRuntime(Runtime::kToFastProperties, 1);
3753 }
3754
3755 // Stack layout:
3756 // [tos] : result
3757
3758 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003759}
3760
3761
3762void CodeGenerator::VisitAssignment(Assignment* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01003763 VirtualFrame::RegisterAllocationScope scope(this);
Steve Blocka7e24c12009-10-30 11:49:00 +00003764#ifdef DEBUG
3765 int original_height = frame_->height();
3766#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003767 Comment cmnt(masm_, "[ Assignment");
3768
Steve Block6ded16b2010-05-10 14:33:55 +01003769 Variable* var = node->target()->AsVariableProxy()->AsVariable();
3770 Property* prop = node->target()->AsProperty();
Steve Blocka7e24c12009-10-30 11:49:00 +00003771
Steve Block6ded16b2010-05-10 14:33:55 +01003772 if (var != NULL && !var->is_global()) {
3773 EmitSlotAssignment(node);
Steve Blocka7e24c12009-10-30 11:49:00 +00003774
Steve Block6ded16b2010-05-10 14:33:55 +01003775 } else if ((prop != NULL && prop->key()->IsPropertyName()) ||
3776 (var != NULL && var->is_global())) {
3777 // Properties whose keys are property names and global variables are
3778 // treated as named property references. We do not need to consider
3779 // global 'this' because it is not a valid left-hand side.
3780 EmitNamedPropertyAssignment(node);
Steve Blocka7e24c12009-10-30 11:49:00 +00003781
Steve Block6ded16b2010-05-10 14:33:55 +01003782 } else if (prop != NULL) {
3783 // Other properties (including rewritten parameters for a function that
3784 // uses arguments) are keyed property assignments.
3785 EmitKeyedPropertyAssignment(node);
3786
3787 } else {
3788 // Invalid left-hand side.
3789 Load(node->target());
3790 frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
3791 // The runtime call doesn't actually return but the code generator will
3792 // still generate code and expects a certain frame height.
3793 frame_->EmitPush(r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00003794 }
Steve Block6ded16b2010-05-10 14:33:55 +01003795 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003796}
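
// Illustrative sketch, not part of the V8 sources: the target classification
// in VisitAssignment reduces to the dispatch below.  The enum and function
// names are hypothetical; only the order of the tests mirrors the code above.
enum SketchAssignmentKind {
  kSketchSlotAssignment,       // Non-global variable.
  kSketchNamedAssignment,      // obj.name = ... or a global variable.
  kSketchKeyedAssignment,      // obj[expr] = ...
  kSketchInvalidLeftHandSide   // Anything else: throw a ReferenceError.
};

static SketchAssignmentKind SketchClassifyAssignmentTarget(
    bool is_variable, bool is_global,
    bool is_property, bool key_is_property_name) {
  if (is_variable && !is_global) return kSketchSlotAssignment;
  if ((is_property && key_is_property_name) || (is_variable && is_global)) {
    return kSketchNamedAssignment;
  }
  if (is_property) return kSketchKeyedAssignment;
  return kSketchInvalidLeftHandSide;
}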
3797
3798
3799void CodeGenerator::VisitThrow(Throw* node) {
3800#ifdef DEBUG
3801 int original_height = frame_->height();
3802#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003803 Comment cmnt(masm_, "[ Throw");
3804
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003805 Load(node->exception());
Steve Blocka7e24c12009-10-30 11:49:00 +00003806 CodeForSourcePosition(node->position());
3807 frame_->CallRuntime(Runtime::kThrow, 1);
3808 frame_->EmitPush(r0);
Steve Block6ded16b2010-05-10 14:33:55 +01003809 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003810}
3811
3812
3813void CodeGenerator::VisitProperty(Property* node) {
3814#ifdef DEBUG
3815 int original_height = frame_->height();
3816#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003817 Comment cmnt(masm_, "[ Property");
3818
3819 { Reference property(this, node);
Steve Block6ded16b2010-05-10 14:33:55 +01003820 property.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00003821 }
Steve Block6ded16b2010-05-10 14:33:55 +01003822 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003823}
3824
3825
3826void CodeGenerator::VisitCall(Call* node) {
3827#ifdef DEBUG
3828 int original_height = frame_->height();
3829#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003830 Comment cmnt(masm_, "[ Call");
3831
3832 Expression* function = node->expression();
3833 ZoneList<Expression*>* args = node->arguments();
3834
3835 // Standard function call.
3836 // Check if the function is a variable or a property.
3837 Variable* var = function->AsVariableProxy()->AsVariable();
3838 Property* property = function->AsProperty();
3839
3840 // ------------------------------------------------------------------------
3841 // Fast-case: Use inline caching.
3842 // ---
3843 // According to ECMA-262, section 11.2.3, page 44, the function to call
3844 // must be resolved after the arguments have been evaluated. The IC code
3845 // automatically handles this by loading the arguments before the function
3846 // is resolved in cache misses (this also holds for megamorphic calls).
3847 // ------------------------------------------------------------------------
3848
3849 if (var != NULL && var->is_possibly_eval()) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003850 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00003851 // ----------------------------------
3852 // JavaScript example: 'eval(arg)' // eval is not known to be shadowed
3853 // ----------------------------------
3854
3855 // In a call to eval, we first call %ResolvePossiblyDirectEval to
3856 // resolve the function we need to call and the receiver of the
3857 // call. Then we call the resolved function using the given
3858 // arguments.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003859
Steve Blocka7e24c12009-10-30 11:49:00 +00003860 // Prepare stack for call to resolved function.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003861 Load(function);
3862
3863 // Allocate a frame slot for the receiver.
Steve Blocka7e24c12009-10-30 11:49:00 +00003864 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003865 frame_->EmitPush(r2);
3866
3867 // Load the arguments.
Steve Blocka7e24c12009-10-30 11:49:00 +00003868 int arg_count = args->length();
3869 for (int i = 0; i < arg_count; i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003870 Load(args->at(i));
Steve Blocka7e24c12009-10-30 11:49:00 +00003871 }
3872
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003873 // If we know that eval can only be shadowed by eval-introduced
3874 // variables we attempt to load the global eval function directly
3875 // in generated code. If we succeed, there is no need to perform a
3876 // context lookup in the runtime system.
3877 JumpTarget done;
3878 if (var->slot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
3879 ASSERT(var->slot()->type() == Slot::LOOKUP);
3880 JumpTarget slow;
3881 // Prepare the stack for the call to
3882 // ResolvePossiblyDirectEvalNoLookup by pushing the loaded
3883 // function, the first argument to the eval call and the
3884 // receiver.
3885 LoadFromGlobalSlotCheckExtensions(var->slot(),
3886 NOT_INSIDE_TYPEOF,
3887 &slow);
3888 frame_->EmitPush(r0);
3889 if (arg_count > 0) {
3890 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
3891 frame_->EmitPush(r1);
3892 } else {
3893 frame_->EmitPush(r2);
3894 }
3895 __ ldr(r1, frame_->Receiver());
3896 frame_->EmitPush(r1);
3897
3898 frame_->CallRuntime(Runtime::kResolvePossiblyDirectEvalNoLookup, 3);
3899
3900 done.Jump();
3901 slow.Bind();
3902 }
3903
3904 // Prepare the stack for the call to ResolvePossiblyDirectEval by
3905 // pushing the loaded function, the first argument to the eval
3906 // call and the receiver.
Steve Blocka7e24c12009-10-30 11:49:00 +00003907 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize + kPointerSize));
3908 frame_->EmitPush(r1);
3909 if (arg_count > 0) {
3910 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
3911 frame_->EmitPush(r1);
3912 } else {
3913 frame_->EmitPush(r2);
3914 }
Leon Clarkee46be812010-01-19 14:06:41 +00003915 __ ldr(r1, frame_->Receiver());
3916 frame_->EmitPush(r1);
3917
Steve Blocka7e24c12009-10-30 11:49:00 +00003918 // Resolve the call.
Leon Clarkee46be812010-01-19 14:06:41 +00003919 frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 3);
Steve Blocka7e24c12009-10-30 11:49:00 +00003920
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003921    // If we generated fast-case code, bind the jump target where the fast
3922    // and slow cases merge.
3923 if (done.is_linked()) done.Bind();
3924
Steve Blocka7e24c12009-10-30 11:49:00 +00003925 // Touch up stack with the right values for the function and the receiver.
Leon Clarkee46be812010-01-19 14:06:41 +00003926 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
Steve Blocka7e24c12009-10-30 11:49:00 +00003927 __ str(r1, MemOperand(sp, arg_count * kPointerSize));
3928
3929 // Call the function.
3930 CodeForSourcePosition(node->position());
3931
3932 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Leon Clarkee46be812010-01-19 14:06:41 +00003933 CallFunctionStub call_function(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
Steve Blocka7e24c12009-10-30 11:49:00 +00003934 frame_->CallStub(&call_function, arg_count + 1);
3935
3936 __ ldr(cp, frame_->Context());
3937 // Remove the function from the stack.
3938 frame_->Drop();
3939 frame_->EmitPush(r0);
3940
3941 } else if (var != NULL && !var->is_this() && var->is_global()) {
3942 // ----------------------------------
3943 // JavaScript example: 'foo(1, 2, 3)' // foo is global
3944 // ----------------------------------
Steve Blocka7e24c12009-10-30 11:49:00 +00003945 // Pass the global object as the receiver and let the IC stub
3946 // patch the stack to use the global proxy as 'this' in the
3947 // invoked function.
3948 LoadGlobal();
3949
3950 // Load the arguments.
3951 int arg_count = args->length();
3952 for (int i = 0; i < arg_count; i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003953 Load(args->at(i));
Steve Blocka7e24c12009-10-30 11:49:00 +00003954 }
3955
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003956 VirtualFrame::SpilledScope spilled_scope(frame_);
Andrei Popescu402d9372010-02-26 13:31:12 +00003957    // Set up the name register and call the IC initialization code.
3958 __ mov(r2, Operand(var->name()));
Steve Blocka7e24c12009-10-30 11:49:00 +00003959 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
3960 Handle<Code> stub = ComputeCallInitialize(arg_count, in_loop);
3961 CodeForSourcePosition(node->position());
3962 frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET_CONTEXT,
3963 arg_count + 1);
3964 __ ldr(cp, frame_->Context());
Steve Blocka7e24c12009-10-30 11:49:00 +00003965 frame_->EmitPush(r0);
3966
3967 } else if (var != NULL && var->slot() != NULL &&
3968 var->slot()->type() == Slot::LOOKUP) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003969 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00003970 // ----------------------------------
Kristian Monsen25f61362010-05-21 11:50:48 +01003971 // JavaScript examples:
3972 //
3973 // with (obj) foo(1, 2, 3) // foo may be in obj.
3974 //
3975 // function f() {};
3976 // function g() {
3977 // eval(...);
3978 // f(); // f could be in extension object.
3979 // }
Steve Blocka7e24c12009-10-30 11:49:00 +00003980 // ----------------------------------
3981
Kristian Monsen25f61362010-05-21 11:50:48 +01003982 // JumpTargets do not yet support merging frames so the frame must be
3983 // spilled when jumping to these targets.
3984 JumpTarget slow, done;
3985
3986 // Generate fast case for loading functions from slots that
3987 // correspond to local/global variables or arguments unless they
3988 // are shadowed by eval-introduced bindings.
3989 EmitDynamicLoadFromSlotFastCase(var->slot(),
3990 NOT_INSIDE_TYPEOF,
3991 &slow,
3992 &done);
3993
3994 slow.Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00003995 // Load the function
3996 frame_->EmitPush(cp);
3997 __ mov(r0, Operand(var->name()));
3998 frame_->EmitPush(r0);
3999 frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
4000 // r0: slot value; r1: receiver
4001
4002 // Load the receiver.
4003 frame_->EmitPush(r0); // function
4004 frame_->EmitPush(r1); // receiver
4005
Kristian Monsen25f61362010-05-21 11:50:48 +01004006 // If fast case code has been generated, emit code to push the
4007 // function and receiver and have the slow path jump around this
4008 // code.
4009 if (done.is_linked()) {
4010 JumpTarget call;
4011 call.Jump();
4012 done.Bind();
4013 frame_->EmitPush(r0); // function
4014 LoadGlobalReceiver(r1); // receiver
4015 call.Bind();
4016 }
4017
4018 // Call the function. At this point, everything is spilled but the
4019 // function and receiver are in r0 and r1.
Leon Clarkee46be812010-01-19 14:06:41 +00004020 CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
Steve Blocka7e24c12009-10-30 11:49:00 +00004021 frame_->EmitPush(r0);
4022
4023 } else if (property != NULL) {
4024 // Check if the key is a literal string.
4025 Literal* literal = property->key()->AsLiteral();
4026
4027 if (literal != NULL && literal->handle()->IsSymbol()) {
4028 // ------------------------------------------------------------------
4029 // JavaScript example: 'object.foo(1, 2, 3)' or 'map["key"](1, 2, 3)'
4030 // ------------------------------------------------------------------
4031
Steve Block6ded16b2010-05-10 14:33:55 +01004032 Handle<String> name = Handle<String>::cast(literal->handle());
Steve Blocka7e24c12009-10-30 11:49:00 +00004033
Steve Block6ded16b2010-05-10 14:33:55 +01004034 if (ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION &&
4035 name->IsEqualTo(CStrVector("apply")) &&
4036 args->length() == 2 &&
4037 args->at(1)->AsVariableProxy() != NULL &&
4038 args->at(1)->AsVariableProxy()->IsArguments()) {
4039 // Use the optimized Function.prototype.apply that avoids
4040 // allocating lazily allocated arguments objects.
4041 CallApplyLazy(property->obj(),
4042 args->at(0),
4043 args->at(1)->AsVariableProxy(),
4044 node->position());
4045
4046 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004047 Load(property->obj()); // Receiver.
Steve Block6ded16b2010-05-10 14:33:55 +01004048 // Load the arguments.
4049 int arg_count = args->length();
4050 for (int i = 0; i < arg_count; i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004051 Load(args->at(i));
Steve Block6ded16b2010-05-10 14:33:55 +01004052 }
4053
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004054 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Block6ded16b2010-05-10 14:33:55 +01004055 // Set the name register and call the IC initialization code.
4056 __ mov(r2, Operand(name));
4057 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
4058 Handle<Code> stub = ComputeCallInitialize(arg_count, in_loop);
4059 CodeForSourcePosition(node->position());
4060 frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
4061 __ ldr(cp, frame_->Context());
4062 frame_->EmitPush(r0);
4063 }
Steve Blocka7e24c12009-10-30 11:49:00 +00004064
4065 } else {
4066 // -------------------------------------------
4067 // JavaScript example: 'array[index](1, 2, 3)'
4068 // -------------------------------------------
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004069 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00004070
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004071 Load(property->obj());
Kristian Monsen25f61362010-05-21 11:50:48 +01004072 if (!property->is_synthetic()) {
4073 // Duplicate receiver for later use.
4074 __ ldr(r0, MemOperand(sp, 0));
4075 frame_->EmitPush(r0);
4076 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004077 Load(property->key());
Steve Block6ded16b2010-05-10 14:33:55 +01004078 EmitKeyedLoad();
Leon Clarked91b9f72010-01-27 17:25:45 +00004079 // Put the function below the receiver.
Steve Blocka7e24c12009-10-30 11:49:00 +00004080 if (property->is_synthetic()) {
Leon Clarked91b9f72010-01-27 17:25:45 +00004081 // Use the global receiver.
Kristian Monsen25f61362010-05-21 11:50:48 +01004082 frame_->EmitPush(r0); // Function.
Steve Blocka7e24c12009-10-30 11:49:00 +00004083 LoadGlobalReceiver(r0);
4084 } else {
Kristian Monsen25f61362010-05-21 11:50:48 +01004085 // Switch receiver and function.
4086 frame_->EmitPop(r1); // Receiver.
4087 frame_->EmitPush(r0); // Function.
4088 frame_->EmitPush(r1); // Receiver.
Steve Blocka7e24c12009-10-30 11:49:00 +00004089 }
4090
4091 // Call the function.
Leon Clarkee46be812010-01-19 14:06:41 +00004092 CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position());
Steve Blocka7e24c12009-10-30 11:49:00 +00004093 frame_->EmitPush(r0);
4094 }
4095
4096 } else {
4097 // ----------------------------------
4098 // JavaScript example: 'foo(1, 2, 3)' // foo is not global
4099 // ----------------------------------
4100
4101 // Load the function.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004102 Load(function);
4103
4104 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00004105
4106 // Pass the global proxy as the receiver.
4107 LoadGlobalReceiver(r0);
4108
4109 // Call the function.
Leon Clarkee46be812010-01-19 14:06:41 +00004110 CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
Steve Blocka7e24c12009-10-30 11:49:00 +00004111 frame_->EmitPush(r0);
4112 }
Steve Block6ded16b2010-05-10 14:33:55 +01004113 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00004114}
4115
4116
4117void CodeGenerator::VisitCallNew(CallNew* node) {
4118#ifdef DEBUG
4119 int original_height = frame_->height();
4120#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00004121 Comment cmnt(masm_, "[ CallNew");
4122
4123 // According to ECMA-262, section 11.2.2, page 44, the function
4124 // expression in new calls must be evaluated before the
4125 // arguments. This is different from ordinary calls, where the
4126 // actual function to call is resolved after the arguments have been
4127 // evaluated.
4128
4129 // Compute function to call and use the global object as the
4130 // receiver. There is no need to use the global proxy here because
4131 // it will always be replaced with a newly allocated object.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004132 Load(node->expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00004133 LoadGlobal();
4134
4135 // Push the arguments ("left-to-right") on the stack.
4136 ZoneList<Expression*>* args = node->arguments();
4137 int arg_count = args->length();
4138 for (int i = 0; i < arg_count; i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004139 Load(args->at(i));
Steve Blocka7e24c12009-10-30 11:49:00 +00004140 }
4141
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004142 VirtualFrame::SpilledScope spilled_scope(frame_);
4143
Steve Blocka7e24c12009-10-30 11:49:00 +00004144 // r0: the number of arguments.
Steve Blocka7e24c12009-10-30 11:49:00 +00004145 __ mov(r0, Operand(arg_count));
Steve Blocka7e24c12009-10-30 11:49:00 +00004146 // Load the function into r1 as per calling convention.
Steve Blocka7e24c12009-10-30 11:49:00 +00004147 __ ldr(r1, frame_->ElementAt(arg_count + 1));
4148
4149 // Call the construct call builtin that handles allocation and
4150 // constructor invocation.
4151 CodeForSourcePosition(node->position());
4152 Handle<Code> ic(Builtins::builtin(Builtins::JSConstructCall));
Leon Clarke4515c472010-02-03 11:58:03 +00004153 frame_->CallCodeObject(ic, RelocInfo::CONSTRUCT_CALL, arg_count + 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00004154
4155 // Discard old TOS value and push r0 on the stack (same as Pop(), push(r0)).
4156 __ str(r0, frame_->Top());
Steve Block6ded16b2010-05-10 14:33:55 +01004157 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00004158}
4159
4160
4161void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
Steve Block6ded16b2010-05-10 14:33:55 +01004162 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00004163 ASSERT(args->length() == 1);
4164 JumpTarget leave, null, function, non_function_constructor;
4165
4166 // Load the object into r0.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004167 Load(args->at(0));
Steve Blocka7e24c12009-10-30 11:49:00 +00004168 frame_->EmitPop(r0);
4169
4170 // If the object is a smi, we return null.
4171 __ tst(r0, Operand(kSmiTagMask));
4172 null.Branch(eq);
4173
4174 // Check that the object is a JS object but take special care of JS
4175 // functions to make sure they have 'Function' as their class.
4176 __ CompareObjectType(r0, r0, r1, FIRST_JS_OBJECT_TYPE);
4177 null.Branch(lt);
4178
4179 // As long as JS_FUNCTION_TYPE is the last instance type and it is
4180 // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
4181 // LAST_JS_OBJECT_TYPE.
4182 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
4183 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
4184 __ cmp(r1, Operand(JS_FUNCTION_TYPE));
4185 function.Branch(eq);
4186
4187 // Check if the constructor in the map is a function.
4188 __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset));
4189 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
4190 non_function_constructor.Branch(ne);
4191
4192 // The r0 register now contains the constructor function. Grab the
4193 // instance class name from there.
4194 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
4195 __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
4196 frame_->EmitPush(r0);
4197 leave.Jump();
4198
4199 // Functions have class 'Function'.
4200 function.Bind();
4201 __ mov(r0, Operand(Factory::function_class_symbol()));
4202 frame_->EmitPush(r0);
4203 leave.Jump();
4204
4205 // Objects with a non-function constructor have class 'Object'.
4206 non_function_constructor.Bind();
4207 __ mov(r0, Operand(Factory::Object_symbol()));
4208 frame_->EmitPush(r0);
4209 leave.Jump();
4210
4211 // Non-JS objects have class null.
4212 null.Bind();
4213 __ LoadRoot(r0, Heap::kNullValueRootIndex);
4214 frame_->EmitPush(r0);
4215
4216 // All done.
4217 leave.Bind();
4218}
4219
4220
4221void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
Steve Block6ded16b2010-05-10 14:33:55 +01004222 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00004223 ASSERT(args->length() == 1);
4224 JumpTarget leave;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004225 Load(args->at(0));
Steve Blocka7e24c12009-10-30 11:49:00 +00004226 frame_->EmitPop(r0); // r0 contains object.
4227 // if (object->IsSmi()) return the object.
4228 __ tst(r0, Operand(kSmiTagMask));
4229 leave.Branch(eq);
4230 // It is a heap object - get map. If (!object->IsJSValue()) return the object.
4231 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
4232 leave.Branch(ne);
4233 // Load the value.
4234 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset));
4235 leave.Bind();
4236 frame_->EmitPush(r0);
4237}
4238
4239
4240void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* args) {
Steve Block6ded16b2010-05-10 14:33:55 +01004241 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00004242 ASSERT(args->length() == 2);
4243 JumpTarget leave;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004244 Load(args->at(0)); // Load the object.
4245 Load(args->at(1)); // Load the value.
Steve Blocka7e24c12009-10-30 11:49:00 +00004246 frame_->EmitPop(r0); // r0 contains value
4247 frame_->EmitPop(r1); // r1 contains object
4248 // if (object->IsSmi()) return object.
4249 __ tst(r1, Operand(kSmiTagMask));
4250 leave.Branch(eq);
4251 // It is a heap object - get map. If (!object->IsJSValue()) return the object.
4252 __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
4253 leave.Branch(ne);
4254 // Store the value.
4255 __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
4256 // Update the write barrier.
4257 __ mov(r2, Operand(JSValue::kValueOffset - kHeapObjectTag));
4258 __ RecordWrite(r1, r2, r3);
4259 // Leave.
4260 leave.Bind();
4261 frame_->EmitPush(r0);
4262}
4263
4264
4265void CodeGenerator::GenerateIsSmi(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004266 ASSERT(args->length() == 1);
Leon Clarkef7060e22010-06-03 12:02:55 +01004267 Load(args->at(0));
4268 Register reg = frame_->PopToRegister();
4269 __ tst(reg, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00004270 cc_reg_ = eq;
4271}
4272
4273
4274void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004275 // See comment in CodeGenerator::GenerateLog in codegen-ia32.cc.
4276 ASSERT_EQ(args->length(), 3);
4277#ifdef ENABLE_LOGGING_AND_PROFILING
4278 if (ShouldGenerateLog(args->at(0))) {
Leon Clarkef7060e22010-06-03 12:02:55 +01004279 Load(args->at(1));
4280 Load(args->at(2));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004281 frame_->CallRuntime(Runtime::kLog, 2);
Steve Blocka7e24c12009-10-30 11:49:00 +00004282 }
4283#endif
Leon Clarkef7060e22010-06-03 12:02:55 +01004284 frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +00004285}
4286
4287
4288void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004289 ASSERT(args->length() == 1);
Leon Clarkef7060e22010-06-03 12:02:55 +01004290 Load(args->at(0));
4291 Register reg = frame_->PopToRegister();
4292 __ tst(reg, Operand(kSmiTagMask | 0x80000000u));
Steve Blocka7e24c12009-10-30 11:49:00 +00004293 cc_reg_ = eq;
4294}
4295
4296
Steve Block6ded16b2010-05-10 14:33:55 +01004297// Generates the Math.pow method - currently just calls runtime.
4298void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
4299 ASSERT(args->length() == 2);
4300 Load(args->at(0));
4301 Load(args->at(1));
4302 frame_->CallRuntime(Runtime::kMath_pow, 2);
4303 frame_->EmitPush(r0);
4304}
4305
4306
4307// Generates the Math.sqrt method - currently just calls runtime.
4308void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) {
4309 ASSERT(args->length() == 1);
4310 Load(args->at(0));
4311 frame_->CallRuntime(Runtime::kMath_sqrt, 1);
4312 frame_->EmitPush(r0);
4313}
4314
4315
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004316class DeferredStringCharCodeAt : public DeferredCode {
4317 public:
4318 DeferredStringCharCodeAt(Register object,
4319 Register index,
4320 Register scratch,
4321 Register result)
4322 : result_(result),
4323 char_code_at_generator_(object,
4324 index,
4325 scratch,
4326 result,
4327 &need_conversion_,
4328 &need_conversion_,
4329 &index_out_of_range_,
4330 STRING_INDEX_IS_NUMBER) {}
4331
4332 StringCharCodeAtGenerator* fast_case_generator() {
4333 return &char_code_at_generator_;
4334 }
4335
4336 virtual void Generate() {
4337 VirtualFrameRuntimeCallHelper call_helper(frame_state());
4338 char_code_at_generator_.GenerateSlow(masm(), call_helper);
4339
4340 __ bind(&need_conversion_);
4341 // Move the undefined value into the result register, which will
4342 // trigger conversion.
4343 __ LoadRoot(result_, Heap::kUndefinedValueRootIndex);
4344 __ jmp(exit_label());
4345
4346 __ bind(&index_out_of_range_);
4347 // When the index is out of range, the spec requires us to return
4348 // NaN.
4349 __ LoadRoot(result_, Heap::kNanValueRootIndex);
4350 __ jmp(exit_label());
4351 }
4352
4353 private:
4354 Register result_;
4355
4356 Label need_conversion_;
4357 Label index_out_of_range_;
4358
4359 StringCharCodeAtGenerator char_code_at_generator_;
4360};
4361
4362
4363// This generates code that performs a String.prototype.charCodeAt() call
4364 // or returns undefined in order to trigger conversion.
4365void CodeGenerator::GenerateStringCharCodeAt(ZoneList<Expression*>* args) {
4366 VirtualFrame::SpilledScope spilled_scope(frame_);
4367 Comment(masm_, "[ GenerateStringCharCodeAt");
Steve Blocka7e24c12009-10-30 11:49:00 +00004368 ASSERT(args->length() == 2);
Steve Blockd0582a62009-12-15 09:54:21 +00004369
Leon Clarkef7060e22010-06-03 12:02:55 +01004370 Load(args->at(0));
4371 Load(args->at(1));
Steve Blockd0582a62009-12-15 09:54:21 +00004372
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004373 Register index = r1;
4374 Register object = r2;
Steve Blockd0582a62009-12-15 09:54:21 +00004375
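  // The index was loaded last, so it is on top of the stack, with the
  // string object below it.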
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004376 frame_->EmitPop(r1);
4377 frame_->EmitPop(r2);
Steve Blockd0582a62009-12-15 09:54:21 +00004378
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004379 // We need two extra registers.
4380 Register scratch = r3;
4381 Register result = r0;
4382
4383 DeferredStringCharCodeAt* deferred =
4384 new DeferredStringCharCodeAt(object,
4385 index,
4386 scratch,
4387 result);
4388 deferred->fast_case_generator()->GenerateFast(masm_);
4389 deferred->BindExit();
Leon Clarkef7060e22010-06-03 12:02:55 +01004390 frame_->EmitPush(result);
Steve Blocka7e24c12009-10-30 11:49:00 +00004391}
4392
4393
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004394class DeferredStringCharFromCode : public DeferredCode {
4395 public:
4396 DeferredStringCharFromCode(Register code,
4397 Register result)
4398 : char_from_code_generator_(code, result) {}
4399
4400 StringCharFromCodeGenerator* fast_case_generator() {
4401 return &char_from_code_generator_;
4402 }
4403
4404 virtual void Generate() {
4405 VirtualFrameRuntimeCallHelper call_helper(frame_state());
4406 char_from_code_generator_.GenerateSlow(masm(), call_helper);
4407 }
4408
4409 private:
4410 StringCharFromCodeGenerator char_from_code_generator_;
4411};
4412
4413
4414// Generates code for creating a one-char string from a char code.
4415void CodeGenerator::GenerateStringCharFromCode(ZoneList<Expression*>* args) {
4416 VirtualFrame::SpilledScope spilled_scope(frame_);
4417 Comment(masm_, "[ GenerateStringCharFromCode");
Steve Block6ded16b2010-05-10 14:33:55 +01004418 ASSERT(args->length() == 1);
4419
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004420 Load(args->at(0));
4421
Steve Block6ded16b2010-05-10 14:33:55 +01004422 Register code = r1;
Steve Block6ded16b2010-05-10 14:33:55 +01004423 Register result = r0;
4424
Steve Block6ded16b2010-05-10 14:33:55 +01004425 frame_->EmitPop(code);
4426
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004427 DeferredStringCharFromCode* deferred = new DeferredStringCharFromCode(
4428 code, result);
4429 deferred->fast_case_generator()->GenerateFast(masm_);
4430 deferred->BindExit();
4431 frame_->EmitPush(result);
4432}
4433
4434
4435class DeferredStringCharAt : public DeferredCode {
4436 public:
4437 DeferredStringCharAt(Register object,
4438 Register index,
4439 Register scratch1,
4440 Register scratch2,
4441 Register result)
4442 : result_(result),
4443 char_at_generator_(object,
4444 index,
4445 scratch1,
4446 scratch2,
4447 result,
4448 &need_conversion_,
4449 &need_conversion_,
4450 &index_out_of_range_,
4451 STRING_INDEX_IS_NUMBER) {}
4452
4453 StringCharAtGenerator* fast_case_generator() {
4454 return &char_at_generator_;
4455 }
4456
4457 virtual void Generate() {
4458 VirtualFrameRuntimeCallHelper call_helper(frame_state());
4459 char_at_generator_.GenerateSlow(masm(), call_helper);
4460
4461 __ bind(&need_conversion_);
4462 // Move smi zero into the result register, which will trigger
4463 // conversion.
4464 __ mov(result_, Operand(Smi::FromInt(0)));
4465 __ jmp(exit_label());
4466
4467 __ bind(&index_out_of_range_);
4468 // When the index is out of range, the spec requires us to return
4469 // the empty string.
4470 __ LoadRoot(result_, Heap::kEmptyStringRootIndex);
4471 __ jmp(exit_label());
4472 }
4473
4474 private:
4475 Register result_;
4476
4477 Label need_conversion_;
4478 Label index_out_of_range_;
4479
4480 StringCharAtGenerator char_at_generator_;
4481};
4482
4483
4484// This generates code that performs a String.prototype.charAt() call
4485// or returns a smi in order to trigger conversion.
4486void CodeGenerator::GenerateStringCharAt(ZoneList<Expression*>* args) {
4487 VirtualFrame::SpilledScope spilled_scope(frame_);
4488 Comment(masm_, "[ GenerateStringCharAt");
4489 ASSERT(args->length() == 2);
4490
4491 Load(args->at(0));
4492 Load(args->at(1));
4493
4494 Register index = r1;
4495 Register object = r2;
4496
4497 frame_->EmitPop(r1);
4498 frame_->EmitPop(r2);
4499
4500 // We need three extra registers.
4501 Register scratch1 = r3;
4502 Register scratch2 = r4;
4503 Register result = r0;
4504
4505 DeferredStringCharAt* deferred =
4506 new DeferredStringCharAt(object,
4507 index,
4508 scratch1,
4509 scratch2,
4510 result);
4511 deferred->fast_case_generator()->GenerateFast(masm_);
4512 deferred->BindExit();
Steve Block6ded16b2010-05-10 14:33:55 +01004513 frame_->EmitPush(result);
4514}
4515
4516
Steve Blocka7e24c12009-10-30 11:49:00 +00004517void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004518 ASSERT(args->length() == 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004519 Load(args->at(0));
Steve Blocka7e24c12009-10-30 11:49:00 +00004520 JumpTarget answer;
4521 // We need the CC bits to come out as not_equal in the case where the
4522 // object is a smi. This can't be done with the usual test opcode so
4523 // we use XOR to get the right CC bits.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004524 Register possible_array = frame_->PopToRegister();
4525 Register scratch = VirtualFrame::scratch0();
4526 __ and_(scratch, possible_array, Operand(kSmiTagMask));
4527 __ eor(scratch, scratch, Operand(kSmiTagMask), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00004528 answer.Branch(ne);
4529 // It is a heap object - get the map. Check if the object is a JS array.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004530 __ CompareObjectType(possible_array, scratch, scratch, JS_ARRAY_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00004531 answer.Bind();
4532 cc_reg_ = eq;
4533}
4534
4535
Andrei Popescu402d9372010-02-26 13:31:12 +00004536void CodeGenerator::GenerateIsRegExp(ZoneList<Expression*>* args) {
Andrei Popescu402d9372010-02-26 13:31:12 +00004537 ASSERT(args->length() == 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004538 Load(args->at(0));
Andrei Popescu402d9372010-02-26 13:31:12 +00004539 JumpTarget answer;
4540 // We need the CC bits to come out as not_equal in the case where the
4541 // object is a smi. This can't be done with the usual test opcode so
4542 // we use XOR to get the right CC bits.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004543 Register possible_regexp = frame_->PopToRegister();
4544 Register scratch = VirtualFrame::scratch0();
4545 __ and_(scratch, possible_regexp, Operand(kSmiTagMask));
4546 __ eor(scratch, scratch, Operand(kSmiTagMask), SetCC);
Andrei Popescu402d9372010-02-26 13:31:12 +00004547 answer.Branch(ne);
4548 // It is a heap object - get the map. Check if the object is a regexp.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004549 __ CompareObjectType(possible_regexp, scratch, scratch, JS_REGEXP_TYPE);
Andrei Popescu402d9372010-02-26 13:31:12 +00004550 answer.Bind();
4551 cc_reg_ = eq;
4552}
4553
4554
Steve Blockd0582a62009-12-15 09:54:21 +00004555void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
4556 // This generates a fast version of:
4557 // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp')
Steve Blockd0582a62009-12-15 09:54:21 +00004558 ASSERT(args->length() == 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004559 Load(args->at(0));
4560 Register possible_object = frame_->PopToRegister();
4561 __ tst(possible_object, Operand(kSmiTagMask));
Steve Blockd0582a62009-12-15 09:54:21 +00004562 false_target()->Branch(eq);
4563
4564 __ LoadRoot(ip, Heap::kNullValueRootIndex);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004565 __ cmp(possible_object, ip);
Steve Blockd0582a62009-12-15 09:54:21 +00004566 true_target()->Branch(eq);
4567
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004568 Register map_reg = VirtualFrame::scratch0();
4569 __ ldr(map_reg, FieldMemOperand(possible_object, HeapObject::kMapOffset));
Steve Blockd0582a62009-12-15 09:54:21 +00004570 // Undetectable objects behave like undefined when tested with typeof.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004571 __ ldrb(possible_object, FieldMemOperand(map_reg, Map::kBitFieldOffset));
4572 __ tst(possible_object, Operand(1 << Map::kIsUndetectable));
Leon Clarkef7060e22010-06-03 12:02:55 +01004573 false_target()->Branch(ne);
Steve Blockd0582a62009-12-15 09:54:21 +00004574
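  // Finally, check that the instance type is within the JS object range.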
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004575 __ ldrb(possible_object, FieldMemOperand(map_reg, Map::kInstanceTypeOffset));
4576 __ cmp(possible_object, Operand(FIRST_JS_OBJECT_TYPE));
Steve Blockd0582a62009-12-15 09:54:21 +00004577 false_target()->Branch(lt);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004578 __ cmp(possible_object, Operand(LAST_JS_OBJECT_TYPE));
Steve Blockd0582a62009-12-15 09:54:21 +00004579 cc_reg_ = le;
4580}
4581
4582
4583void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) {
4584 // This generates a fast version of:
4585 // (%_ClassOf(arg) === 'Function')
Steve Blockd0582a62009-12-15 09:54:21 +00004586 ASSERT(args->length() == 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004587 Load(args->at(0));
4588 Register possible_function = frame_->PopToRegister();
4589 __ tst(possible_function, Operand(kSmiTagMask));
Steve Blockd0582a62009-12-15 09:54:21 +00004590 false_target()->Branch(eq);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004591 Register map_reg = VirtualFrame::scratch0();
4592 Register scratch = VirtualFrame::scratch1();
4593 __ CompareObjectType(possible_function, map_reg, scratch, JS_FUNCTION_TYPE);
Steve Blockd0582a62009-12-15 09:54:21 +00004594 cc_reg_ = eq;
4595}
4596
4597
Leon Clarked91b9f72010-01-27 17:25:45 +00004598void CodeGenerator::GenerateIsUndetectableObject(ZoneList<Expression*>* args) {
Leon Clarked91b9f72010-01-27 17:25:45 +00004599 ASSERT(args->length() == 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004600 Load(args->at(0));
4601 Register possible_undetectable = frame_->PopToRegister();
4602 __ tst(possible_undetectable, Operand(kSmiTagMask));
Leon Clarked91b9f72010-01-27 17:25:45 +00004603 false_target()->Branch(eq);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004604 Register scratch = VirtualFrame::scratch0();
4605 __ ldr(scratch,
4606 FieldMemOperand(possible_undetectable, HeapObject::kMapOffset));
4607 __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
4608 __ tst(scratch, Operand(1 << Map::kIsUndetectable));
Leon Clarked91b9f72010-01-27 17:25:45 +00004609 cc_reg_ = ne;
4610}
4611
4612
Steve Blocka7e24c12009-10-30 11:49:00 +00004613void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004614 ASSERT(args->length() == 0);
4615
Leon Clarkef7060e22010-06-03 12:02:55 +01004616 Register scratch0 = VirtualFrame::scratch0();
4617 Register scratch1 = VirtualFrame::scratch1();
Steve Blocka7e24c12009-10-30 11:49:00 +00004618 // Get the frame pointer for the calling frame.
Leon Clarkef7060e22010-06-03 12:02:55 +01004619 __ ldr(scratch0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00004620
4621 // Skip the arguments adaptor frame if it exists.
Leon Clarkef7060e22010-06-03 12:02:55 +01004622 __ ldr(scratch1,
4623 MemOperand(scratch0, StandardFrameConstants::kContextOffset));
4624 __ cmp(scratch1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
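  // If it is an adaptor frame (eq), use its caller's frame pointer instead.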
4625 __ ldr(scratch0,
4626 MemOperand(scratch0, StandardFrameConstants::kCallerFPOffset), eq);
Steve Blocka7e24c12009-10-30 11:49:00 +00004627
4628 // Check the marker in the calling frame.
Leon Clarkef7060e22010-06-03 12:02:55 +01004629 __ ldr(scratch1,
4630 MemOperand(scratch0, StandardFrameConstants::kMarkerOffset));
4631 __ cmp(scratch1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
Steve Blocka7e24c12009-10-30 11:49:00 +00004632 cc_reg_ = eq;
4633}
4634
4635
4636void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004637 ASSERT(args->length() == 0);
4638
Leon Clarkef7060e22010-06-03 12:02:55 +01004639 Register tos = frame_->GetTOSRegister();
4640 Register scratch0 = VirtualFrame::scratch0();
4641 Register scratch1 = VirtualFrame::scratch1();
Steve Blocka7e24c12009-10-30 11:49:00 +00004642
Steve Block6ded16b2010-05-10 14:33:55 +01004643 // Check if the calling frame is an arguments adaptor frame.
Leon Clarkef7060e22010-06-03 12:02:55 +01004644 __ ldr(scratch0,
4645 MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4646 __ ldr(scratch1,
4647 MemOperand(scratch0, StandardFrameConstants::kContextOffset));
4648 __ cmp(scratch1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4649
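  // The two loads below use conditional execution (ne/eq) on the result of
  // the comparison above, so no explicit branches are needed.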
4650 // Get the number of formal parameters.
4651 __ mov(tos, Operand(Smi::FromInt(scope()->num_parameters())), LeaveCC, ne);
Steve Block6ded16b2010-05-10 14:33:55 +01004652
4653 // Arguments adaptor case: Read the arguments length from the
4654 // adaptor frame.
Leon Clarkef7060e22010-06-03 12:02:55 +01004655 __ ldr(tos,
4656 MemOperand(scratch0, ArgumentsAdaptorFrameConstants::kLengthOffset),
4657 eq);
Steve Block6ded16b2010-05-10 14:33:55 +01004658
Leon Clarkef7060e22010-06-03 12:02:55 +01004659 frame_->EmitPush(tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00004660}
4661
4662
Steve Block6ded16b2010-05-10 14:33:55 +01004663void CodeGenerator::GenerateArguments(ZoneList<Expression*>* args) {
4664 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00004665 ASSERT(args->length() == 1);
4666
4667 // Satisfy contract with ArgumentsAccessStub:
4668 // Load the key into r1 and the formal parameters count into r0.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004669 Load(args->at(0));
Steve Blocka7e24c12009-10-30 11:49:00 +00004670 frame_->EmitPop(r1);
Andrei Popescu31002712010-02-23 13:46:05 +00004671 __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));
Steve Blocka7e24c12009-10-30 11:49:00 +00004672
4673 // Call the shared stub to get to arguments[key].
4674 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
4675 frame_->CallStub(&stub, 0);
4676 frame_->EmitPush(r0);
4677}
4678
4679
Steve Block6ded16b2010-05-10 14:33:55 +01004680void CodeGenerator::GenerateRandomHeapNumber(
4681 ZoneList<Expression*>* args) {
4682 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00004683 ASSERT(args->length() == 0);
Steve Block6ded16b2010-05-10 14:33:55 +01004684
4685 Label slow_allocate_heapnumber;
4686 Label heapnumber_allocated;
4687
4688 __ AllocateHeapNumber(r4, r1, r2, &slow_allocate_heapnumber);
4689 __ jmp(&heapnumber_allocated);
4690
4691 __ bind(&slow_allocate_heapnumber);
4692 // To allocate a heap number, and ensure that it is not a smi, we
4693 // call the runtime function NumberUnaryMinus on 0, returning the double
4694 // -0.0. A new, distinct heap number is returned each time.
4695 __ mov(r0, Operand(Smi::FromInt(0)));
4696 __ push(r0);
4697 __ CallRuntime(Runtime::kNumberUnaryMinus, 1);
4698 __ mov(r4, Operand(r0));
4699
4700 __ bind(&heapnumber_allocated);
4701
4702 // Convert 32 random bits in r0 to 0.(32 random bits) in a double
4703 // by computing:
4704 // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20).
4705 if (CpuFeatures::IsSupported(VFP3)) {
4706 __ PrepareCallCFunction(0, r1);
4707 __ CallCFunction(ExternalReference::random_uint32_function(), 0);
4708
4709 CpuFeatures::Scope scope(VFP3);
4710 // 0x41300000 is the top half of 1.0 x 2^20 as a double.
4711 // Create this constant using mov/orr to avoid PC relative load.
4712 __ mov(r1, Operand(0x41000000));
4713 __ orr(r1, r1, Operand(0x300000));
4714 // Move 0x41300000xxxxxxxx (x = random bits) to VFP.
4715 __ vmov(d7, r0, r1);
4716 // Move 0x4130000000000000 to VFP.
4717 __ mov(r0, Operand(0));
4718 __ vmov(d8, r0, r1);
4719 // Subtract and store the result in the heap number.
4720 __ vsub(d7, d7, d8);
4721 __ sub(r0, r4, Operand(kHeapObjectTag));
4722 __ vstr(d7, r0, HeapNumber::kValueOffset);
4723 frame_->EmitPush(r4);
4724 } else {
4725 __ mov(r0, Operand(r4));
4726 __ PrepareCallCFunction(1, r1);
4727 __ CallCFunction(
4728 ExternalReference::fill_heap_number_with_random_function(), 1);
4729 frame_->EmitPush(r0);
4730 }
Steve Blocka7e24c12009-10-30 11:49:00 +00004731}
4732
4733
Steve Blockd0582a62009-12-15 09:54:21 +00004734void CodeGenerator::GenerateStringAdd(ZoneList<Expression*>* args) {
4735 ASSERT_EQ(2, args->length());
4736
4737 Load(args->at(0));
4738 Load(args->at(1));
4739
Andrei Popescu31002712010-02-23 13:46:05 +00004740 StringAddStub stub(NO_STRING_ADD_FLAGS);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004741 frame_->SpillAll();
Andrei Popescu31002712010-02-23 13:46:05 +00004742 frame_->CallStub(&stub, 2);
Steve Blockd0582a62009-12-15 09:54:21 +00004743 frame_->EmitPush(r0);
4744}
4745
4746
Leon Clarkee46be812010-01-19 14:06:41 +00004747void CodeGenerator::GenerateSubString(ZoneList<Expression*>* args) {
4748 ASSERT_EQ(3, args->length());
4749
4750 Load(args->at(0));
4751 Load(args->at(1));
4752 Load(args->at(2));
4753
Andrei Popescu31002712010-02-23 13:46:05 +00004754 SubStringStub stub;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004755 frame_->SpillAll();
Andrei Popescu31002712010-02-23 13:46:05 +00004756 frame_->CallStub(&stub, 3);
Leon Clarkee46be812010-01-19 14:06:41 +00004757 frame_->EmitPush(r0);
4758}
4759
4760
4761void CodeGenerator::GenerateStringCompare(ZoneList<Expression*>* args) {
4762 ASSERT_EQ(2, args->length());
4763
4764 Load(args->at(0));
4765 Load(args->at(1));
4766
Leon Clarked91b9f72010-01-27 17:25:45 +00004767 StringCompareStub stub;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004768 frame_->SpillAll();
Leon Clarked91b9f72010-01-27 17:25:45 +00004769 frame_->CallStub(&stub, 2);
Leon Clarkee46be812010-01-19 14:06:41 +00004770 frame_->EmitPush(r0);
4771}
4772
4773
4774void CodeGenerator::GenerateRegExpExec(ZoneList<Expression*>* args) {
4775 ASSERT_EQ(4, args->length());
4776
4777 Load(args->at(0));
4778 Load(args->at(1));
4779 Load(args->at(2));
4780 Load(args->at(3));
Steve Block6ded16b2010-05-10 14:33:55 +01004781 RegExpExecStub stub;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004782 frame_->SpillAll();
Steve Block6ded16b2010-05-10 14:33:55 +01004783 frame_->CallStub(&stub, 4);
4784 frame_->EmitPush(r0);
4785}
Leon Clarkee46be812010-01-19 14:06:41 +00004786
Steve Block6ded16b2010-05-10 14:33:55 +01004787
4788void CodeGenerator::GenerateRegExpConstructResult(ZoneList<Expression*>* args) {
4789 // No stub. This code only occurs a few times in regexp.js.
4790 const int kMaxInlineLength = 100;
4791 ASSERT_EQ(3, args->length());
4792 Load(args->at(0)); // Size of array, smi.
4793 Load(args->at(1)); // "index" property value.
4794 Load(args->at(2)); // "input" property value.
4795 {
4796 VirtualFrame::SpilledScope spilled_scope(frame_);
4797 Label slowcase;
4798 Label done;
4799 __ ldr(r1, MemOperand(sp, kPointerSize * 2));
4800 STATIC_ASSERT(kSmiTag == 0);
4801 STATIC_ASSERT(kSmiTagSize == 1);
4802 __ tst(r1, Operand(kSmiTagMask));
4803 __ b(ne, &slowcase);
4804 __ cmp(r1, Operand(Smi::FromInt(kMaxInlineLength)));
4805 __ b(hi, &slowcase);
4806 // Smi-tagging is equivalent to multiplying by 2.
4807 // Allocate RegExpResult followed by FixedArray with size in ebx.
4808 // JSArray: [Map][empty properties][Elements][Length-smi][index][input]
4809 // Elements: [Map][Length][..elements..]
4810 // Size of JSArray with two in-object properties and the header of a
4811 // FixedArray.
4812 int objects_size =
4813 (JSRegExpResult::kSize + FixedArray::kHeaderSize) / kPointerSize;
4814 __ mov(r5, Operand(r1, LSR, kSmiTagSize + kSmiShiftSize));
4815 __ add(r2, r5, Operand(objects_size));
Kristian Monsen25f61362010-05-21 11:50:48 +01004816 __ AllocateInNewSpace(
4817 r2, // In: Size, in words.
4818 r0, // Out: Start of allocation (tagged).
4819 r3, // Scratch register.
4820 r4, // Scratch register.
4821 &slowcase,
4822 static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));
Steve Block6ded16b2010-05-10 14:33:55 +01004823 // r0: Start of allocated area, object-tagged.
4824 // r1: Number of elements in array, as smi.
4825 // r5: Number of elements, untagged.
4826
4827 // Set JSArray map to global.regexp_result_map().
4828 // Set empty properties FixedArray.
4829 // Set elements to point to FixedArray allocated right after the JSArray.
4830 // Interleave operations for better latency.
4831 __ ldr(r2, ContextOperand(cp, Context::GLOBAL_INDEX));
4832 __ add(r3, r0, Operand(JSRegExpResult::kSize));
4833 __ mov(r4, Operand(Factory::empty_fixed_array()));
4834 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset));
4835 __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset));
4836 __ ldr(r2, ContextOperand(r2, Context::REGEXP_RESULT_MAP_INDEX));
4837 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
4838 __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
4839
4840 // Set input, index and length fields from arguments.
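    // Pop the "input" value (on top of the stack) into r2 and "index" into r4;
    // the "size" argument below them is dropped by the add further down.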
4841 __ ldm(ia_w, sp, static_cast<RegList>(r2.bit() | r4.bit()));
4842 __ str(r1, FieldMemOperand(r0, JSArray::kLengthOffset));
4843 __ add(sp, sp, Operand(kPointerSize));
4844 __ str(r4, FieldMemOperand(r0, JSRegExpResult::kIndexOffset));
4845 __ str(r2, FieldMemOperand(r0, JSRegExpResult::kInputOffset));
4846
4847 // Fill out the elements FixedArray.
4848 // r0: JSArray, tagged.
4849 // r3: FixedArray, tagged.
4850 // r5: Number of elements in array, untagged.
4851
4852 // Set map.
4853 __ mov(r2, Operand(Factory::fixed_array_map()));
4854 __ str(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
4855 // Set FixedArray length.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004856 __ mov(r6, Operand(r5, LSL, kSmiTagSize));
4857 __ str(r6, FieldMemOperand(r3, FixedArray::kLengthOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01004858 // Fill contents of fixed-array with the-hole.
4859 __ mov(r2, Operand(Factory::the_hole_value()));
4860 __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4861 // Fill fixed array elements with hole.
4862 // r0: JSArray, tagged.
4863 // r2: the hole.
4864 // r3: Start of elements in FixedArray.
4865 // r5: Number of elements to fill.
4866 Label loop;
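    // The termination check at the top of the loop uses the flags from the tst
    // below on the first iteration and from the sub on later iterations.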
4867 __ tst(r5, Operand(r5));
4868 __ bind(&loop);
4869 __ b(le, &done); // Jump if r5 is negative or zero.
4870 __ sub(r5, r5, Operand(1), SetCC);
4871 __ str(r2, MemOperand(r3, r5, LSL, kPointerSizeLog2));
4872 __ jmp(&loop);
4873
4874 __ bind(&slowcase);
4875 __ CallRuntime(Runtime::kRegExpConstructResult, 3);
4876
4877 __ bind(&done);
4878 }
4879 frame_->Forget(3);
4880 frame_->EmitPush(r0);
4881}
4882
4883
4884class DeferredSearchCache: public DeferredCode {
4885 public:
4886 DeferredSearchCache(Register dst, Register cache, Register key)
4887 : dst_(dst), cache_(cache), key_(key) {
4888 set_comment("[ DeferredSearchCache");
4889 }
4890
4891 virtual void Generate();
4892
4893 private:
4894 Register dst_, cache_, key_;
4895};
4896
4897
4898void DeferredSearchCache::Generate() {
4899 __ Push(cache_, key_);
4900 __ CallRuntime(Runtime::kGetFromCache, 2);
4901 if (!dst_.is(r0)) {
4902 __ mov(dst_, r0);
4903 }
4904}
4905
4906
4907void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {
4908 ASSERT_EQ(2, args->length());
4909
4910 ASSERT_NE(NULL, args->at(0)->AsLiteral());
4911 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
4912
4913 Handle<FixedArray> jsfunction_result_caches(
4914 Top::global_context()->jsfunction_result_caches());
4915 if (jsfunction_result_caches->length() <= cache_id) {
4916 __ Abort("Attempt to use undefined cache.");
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004917 frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01004918 return;
4919 }
4920
4921 Load(args->at(1));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004922
4923 VirtualFrame::SpilledScope spilled_scope(frame_);
4924
Steve Block6ded16b2010-05-10 14:33:55 +01004925 frame_->EmitPop(r2);
4926
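  // Fetch the function result cache with the given cache_id from the global
  // context into r1.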
4927 __ ldr(r1, ContextOperand(cp, Context::GLOBAL_INDEX));
4928 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalContextOffset));
4929 __ ldr(r1, ContextOperand(r1, Context::JSFUNCTION_RESULT_CACHES_INDEX));
4930 __ ldr(r1, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(cache_id)));
4931
4932 DeferredSearchCache* deferred = new DeferredSearchCache(r0, r1, r2);
4933
4934 const int kFingerOffset =
4935 FixedArray::OffsetOfElementAt(JSFunctionResultCache::kFingerIndex);
4936 ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
4937 __ ldr(r0, FieldMemOperand(r1, kFingerOffset));
4938 // r0 now holds finger offset as a smi.
4939 __ add(r3, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4940 // r3 now points to the start of fixed array elements.
4941 __ ldr(r0, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize, PreIndex));
4942 // Note side effect of PreIndex: r3 now points to the key of the pair.
4943 __ cmp(r2, r0);
4944 deferred->Branch(ne);
4945
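  // Cache hit: the value is stored in the word following the key.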
4946 __ ldr(r0, MemOperand(r3, kPointerSize));
4947
4948 deferred->BindExit();
Leon Clarkee46be812010-01-19 14:06:41 +00004949 frame_->EmitPush(r0);
4950}
4951
4952
Andrei Popescu402d9372010-02-26 13:31:12 +00004953void CodeGenerator::GenerateNumberToString(ZoneList<Expression*>* args) {
4954 ASSERT_EQ(args->length(), 1);
4955
4956 // Load the argument on the stack and jump to the runtime.
4957 Load(args->at(0));
4958
Steve Block6ded16b2010-05-10 14:33:55 +01004959 NumberToStringStub stub;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004960 frame_->SpillAll();
Steve Block6ded16b2010-05-10 14:33:55 +01004961 frame_->CallStub(&stub, 1);
4962 frame_->EmitPush(r0);
4963}
4964
4965
4966class DeferredSwapElements: public DeferredCode {
4967 public:
4968 DeferredSwapElements(Register object, Register index1, Register index2)
4969 : object_(object), index1_(index1), index2_(index2) {
4970 set_comment("[ DeferredSwapElements");
4971 }
4972
4973 virtual void Generate();
4974
4975 private:
4976 Register object_, index1_, index2_;
4977};
4978
4979
4980void DeferredSwapElements::Generate() {
4981 __ push(object_);
4982 __ push(index1_);
4983 __ push(index2_);
4984 __ CallRuntime(Runtime::kSwapElements, 3);
4985}
4986
4987
4988void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) {
4989 Comment cmnt(masm_, "[ GenerateSwapElements");
4990
4991 ASSERT_EQ(3, args->length());
4992
4993 Load(args->at(0));
4994 Load(args->at(1));
4995 Load(args->at(2));
4996
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004997 VirtualFrame::SpilledScope spilled_scope(frame_);
4998
Steve Block6ded16b2010-05-10 14:33:55 +01004999 Register index2 = r2;
5000 Register index1 = r1;
5001 Register object = r0;
5002 Register tmp1 = r3;
5003 Register tmp2 = r4;
5004
5005 frame_->EmitPop(index2);
5006 frame_->EmitPop(index1);
5007 frame_->EmitPop(object);
5008
5009 DeferredSwapElements* deferred =
5010 new DeferredSwapElements(object, index1, index2);
5011
5012 // Fetch the map and check if array is in fast case.
5013 // Check that object doesn't require security checks and
5014 // has no indexed interceptor.
5015 __ CompareObjectType(object, tmp1, tmp2, FIRST_JS_OBJECT_TYPE);
5016 deferred->Branch(lt);
5017 __ ldrb(tmp2, FieldMemOperand(tmp1, Map::kBitFieldOffset));
5018 __ tst(tmp2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask));
5019 deferred->Branch(nz);
5020
5021 // Check the object's elements are in fast case.
5022 __ ldr(tmp1, FieldMemOperand(object, JSObject::kElementsOffset));
5023 __ ldr(tmp2, FieldMemOperand(tmp1, HeapObject::kMapOffset));
5024 __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
5025 __ cmp(tmp2, ip);
5026 deferred->Branch(ne);
5027
5028 // Smi-tagging is equivalent to multiplying by 2.
5029 STATIC_ASSERT(kSmiTag == 0);
5030 STATIC_ASSERT(kSmiTagSize == 1);
5031
5032 // Check that both indices are smis.
5033 __ mov(tmp2, index1);
5034 __ orr(tmp2, tmp2, index2);
5035 __ tst(tmp2, Operand(kSmiTagMask));
5036 deferred->Branch(nz);
5037
5038 // Bring the offsets into the fixed array in tmp1 into index1 and
5039 // index2.
5040 __ mov(tmp2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
5041 __ add(index1, tmp2, Operand(index1, LSL, kPointerSizeLog2 - kSmiTagSize));
5042 __ add(index2, tmp2, Operand(index2, LSL, kPointerSizeLog2 - kSmiTagSize));
5043
5044 // Swap elements.
5045 Register tmp3 = object;
5046 object = no_reg;
5047 __ ldr(tmp3, MemOperand(tmp1, index1));
5048 __ ldr(tmp2, MemOperand(tmp1, index2));
5049 __ str(tmp3, MemOperand(tmp1, index2));
5050 __ str(tmp2, MemOperand(tmp1, index1));
5051
5052 Label done;
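  // Skip the write barrier if the elements array is in new space.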
5053 __ InNewSpace(tmp1, tmp2, eq, &done);
5054 // Possible optimization: do a check that both values are Smis
5055 // (OR them together and test against the smi mask).
5056
5057 __ mov(tmp2, tmp1);
5058 RecordWriteStub recordWrite1(tmp1, index1, tmp3);
5059 __ CallStub(&recordWrite1);
5060
5061 RecordWriteStub recordWrite2(tmp2, index2, tmp3);
5062 __ CallStub(&recordWrite2);
5063
5064 __ bind(&done);
5065
5066 deferred->BindExit();
5067 __ LoadRoot(tmp1, Heap::kUndefinedValueRootIndex);
5068 frame_->EmitPush(tmp1);
5069}
5070
5071
5072void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) {
5073 Comment cmnt(masm_, "[ GenerateCallFunction");
5074
5075 ASSERT(args->length() >= 2);
5076
5077 int n_args = args->length() - 2; // for receiver and function.
5078 Load(args->at(0)); // receiver
5079 for (int i = 0; i < n_args; i++) {
5080 Load(args->at(i + 1));
5081 }
5082 Load(args->at(n_args + 1)); // function
5083 frame_->CallJSFunction(n_args);
Andrei Popescu402d9372010-02-26 13:31:12 +00005084 frame_->EmitPush(r0);
5085}
5086
5087
5088void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) {
5089 ASSERT_EQ(args->length(), 1);
Andrei Popescu402d9372010-02-26 13:31:12 +00005090 Load(args->at(0));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005091 if (CpuFeatures::IsSupported(VFP3)) {
5092 TranscendentalCacheStub stub(TranscendentalCache::SIN);
5093 frame_->SpillAllButCopyTOSToR0();
5094 frame_->CallStub(&stub, 1);
5095 } else {
5096 frame_->CallRuntime(Runtime::kMath_sin, 1);
5097 }
Andrei Popescu402d9372010-02-26 13:31:12 +00005098 frame_->EmitPush(r0);
5099}
5100
5101
5102void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) {
5103 ASSERT_EQ(args->length(), 1);
Andrei Popescu402d9372010-02-26 13:31:12 +00005104 Load(args->at(0));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005105 if (CpuFeatures::IsSupported(VFP3)) {
5106 TranscendentalCacheStub stub(TranscendentalCache::COS);
5107 frame_->SpillAllButCopyTOSToR0();
5108 frame_->CallStub(&stub, 1);
5109 } else {
5110 frame_->CallRuntime(Runtime::kMath_cos, 1);
5111 }
Andrei Popescu402d9372010-02-26 13:31:12 +00005112 frame_->EmitPush(r0);
5113}
5114
5115
Steve Blocka7e24c12009-10-30 11:49:00 +00005116void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005117 ASSERT(args->length() == 2);
5118
5119 // Load the two objects into registers and perform the comparison.
Leon Clarkef7060e22010-06-03 12:02:55 +01005120 Load(args->at(0));
5121 Load(args->at(1));
5122 Register lhs = frame_->PopToRegister();
5123 Register rhs = frame_->PopToRegister(lhs);
5124 __ cmp(lhs, rhs);
Steve Blocka7e24c12009-10-30 11:49:00 +00005125 cc_reg_ = eq;
5126}
5127
5128
5129void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
5130#ifdef DEBUG
5131 int original_height = frame_->height();
5132#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00005133 if (CheckForInlineRuntimeCall(node)) {
5134 ASSERT((has_cc() && frame_->height() == original_height) ||
5135 (!has_cc() && frame_->height() == original_height + 1));
5136 return;
5137 }
5138
5139 ZoneList<Expression*>* args = node->arguments();
5140 Comment cmnt(masm_, "[ CallRuntime");
5141 Runtime::Function* function = node->function();
5142
5143 if (function == NULL) {
5144 // Prepare stack for calling JS runtime function.
Steve Blocka7e24c12009-10-30 11:49:00 +00005145 // Push the builtins object found in the current global object.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005146 Register scratch = VirtualFrame::scratch0();
5147 __ ldr(scratch, GlobalObject());
5148 Register builtins = frame_->GetTOSRegister();
5149 __ ldr(builtins, FieldMemOperand(scratch, GlobalObject::kBuiltinsOffset));
5150 frame_->EmitPush(builtins);
Steve Blocka7e24c12009-10-30 11:49:00 +00005151 }
5152
5153 // Push the arguments ("left-to-right").
5154 int arg_count = args->length();
5155 for (int i = 0; i < arg_count; i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005156 Load(args->at(i));
Steve Blocka7e24c12009-10-30 11:49:00 +00005157 }
5158
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005159 VirtualFrame::SpilledScope spilled_scope(frame_);
5160
Steve Blocka7e24c12009-10-30 11:49:00 +00005161 if (function == NULL) {
5162 // Call the JS runtime function.
Andrei Popescu402d9372010-02-26 13:31:12 +00005163 __ mov(r2, Operand(node->name()));
Steve Blocka7e24c12009-10-30 11:49:00 +00005164 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
5165 Handle<Code> stub = ComputeCallInitialize(arg_count, in_loop);
5166 frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
5167 __ ldr(cp, frame_->Context());
Steve Blocka7e24c12009-10-30 11:49:00 +00005168 frame_->EmitPush(r0);
5169 } else {
5170 // Call the C runtime function.
5171 frame_->CallRuntime(function, arg_count);
5172 frame_->EmitPush(r0);
5173 }
Steve Block6ded16b2010-05-10 14:33:55 +01005174 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00005175}
5176
5177
5178void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
5179#ifdef DEBUG
5180 int original_height = frame_->height();
5181#endif
Steve Block6ded16b2010-05-10 14:33:55 +01005182 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00005183 Comment cmnt(masm_, "[ UnaryOperation");
5184
5185 Token::Value op = node->op();
5186
5187 if (op == Token::NOT) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005188 LoadCondition(node->expression(), false_target(), true_target(), true);
Steve Blocka7e24c12009-10-30 11:49:00 +00005189 // LoadCondition may (and usually does) leave a test and branch to
5190 // be emitted by the caller. In that case, negate the condition.
5191 if (has_cc()) cc_reg_ = NegateCondition(cc_reg_);
5192
5193 } else if (op == Token::DELETE) {
5194 Property* property = node->expression()->AsProperty();
5195 Variable* variable = node->expression()->AsVariableProxy()->AsVariable();
5196 if (property != NULL) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005197 Load(property->obj());
5198 Load(property->key());
Steve Blockd0582a62009-12-15 09:54:21 +00005199 frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, 2);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005200 frame_->EmitPush(r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00005201
5202 } else if (variable != NULL) {
5203 Slot* slot = variable->slot();
5204 if (variable->is_global()) {
5205 LoadGlobal();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005206 frame_->EmitPush(Operand(variable->name()));
Steve Blockd0582a62009-12-15 09:54:21 +00005207 frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, 2);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005208 frame_->EmitPush(r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00005209
5210 } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
5211 // Look up the context holding the named variable.
5212 frame_->EmitPush(cp);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005213 frame_->EmitPush(Operand(variable->name()));
Steve Blocka7e24c12009-10-30 11:49:00 +00005214 frame_->CallRuntime(Runtime::kLookupContext, 2);
5215 // r0: context
5216 frame_->EmitPush(r0);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005217 frame_->EmitPush(Operand(variable->name()));
Steve Blockd0582a62009-12-15 09:54:21 +00005218 frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, 2);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005219 frame_->EmitPush(r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00005220
5221 } else {
5222 // Default: Result of deleting non-global, not dynamically
5223 // introduced variables is false.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005224 frame_->EmitPushRoot(Heap::kFalseValueRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +00005225 }
5226
5227 } else {
5228 // Default: Result of deleting expressions is true.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005229 Load(node->expression()); // may have side-effects
Steve Blocka7e24c12009-10-30 11:49:00 +00005230 frame_->Drop();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005231 frame_->EmitPushRoot(Heap::kTrueValueRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +00005232 }
Steve Blocka7e24c12009-10-30 11:49:00 +00005233
5234 } else if (op == Token::TYPEOF) {
5235 // Special case for loading the typeof expression; see comment on
5236 // LoadTypeofExpression().
5237 LoadTypeofExpression(node->expression());
5238 frame_->CallRuntime(Runtime::kTypeof, 1);
5239 frame_->EmitPush(r0); // r0 has result
5240
5241 } else {
Leon Clarke4515c472010-02-03 11:58:03 +00005242 bool overwrite =
5243 (node->expression()->AsBinaryOperation() != NULL &&
5244 node->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005245 Load(node->expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00005246 switch (op) {
5247 case Token::NOT:
5248 case Token::DELETE:
5249 case Token::TYPEOF:
5250 UNREACHABLE(); // handled above
5251 break;
5252
5253 case Token::SUB: {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005254 VirtualFrame::SpilledScope spilled(frame_);
5255 frame_->EmitPop(r0);
Leon Clarkee46be812010-01-19 14:06:41 +00005256 GenericUnaryOpStub stub(Token::SUB, overwrite);
Steve Blocka7e24c12009-10-30 11:49:00 +00005257 frame_->CallStub(&stub, 0);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005258 frame_->EmitPush(r0); // r0 has result
Steve Blocka7e24c12009-10-30 11:49:00 +00005259 break;
5260 }
5261
5262 case Token::BIT_NOT: {
5263 // smi check
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005264 VirtualFrame::SpilledScope spilled(frame_);
5265 frame_->EmitPop(r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00005266 JumpTarget smi_label;
5267 JumpTarget continue_label;
5268 __ tst(r0, Operand(kSmiTagMask));
5269 smi_label.Branch(eq);
5270
Leon Clarke4515c472010-02-03 11:58:03 +00005271 GenericUnaryOpStub stub(Token::BIT_NOT, overwrite);
5272 frame_->CallStub(&stub, 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00005273 continue_label.Jump();
Leon Clarke4515c472010-02-03 11:58:03 +00005274
Steve Blocka7e24c12009-10-30 11:49:00 +00005275 smi_label.Bind();
5276 __ mvn(r0, Operand(r0));
5277 __ bic(r0, r0, Operand(kSmiTagMask)); // bit-clear inverted smi-tag
5278 continue_label.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005279 frame_->EmitPush(r0); // r0 has result
Steve Blocka7e24c12009-10-30 11:49:00 +00005280 break;
5281 }
5282
5283 case Token::VOID:
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005284 frame_->Drop();
5285 frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +00005286 break;
5287
5288 case Token::ADD: {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005289 VirtualFrame::SpilledScope spilled(frame_);
5290 frame_->EmitPop(r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00005291 // Smi check.
5292 JumpTarget continue_label;
5293 __ tst(r0, Operand(kSmiTagMask));
5294 continue_label.Branch(eq);
5295 frame_->EmitPush(r0);
Steve Blockd0582a62009-12-15 09:54:21 +00005296 frame_->InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS, 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00005297 continue_label.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005298 frame_->EmitPush(r0); // r0 has result
Steve Blocka7e24c12009-10-30 11:49:00 +00005299 break;
5300 }
5301 default:
5302 UNREACHABLE();
5303 }
Steve Blocka7e24c12009-10-30 11:49:00 +00005304 }
5305 ASSERT(!has_valid_frame() ||
5306 (has_cc() && frame_->height() == original_height) ||
5307 (!has_cc() && frame_->height() == original_height + 1));
5308}
5309
5310
5311void CodeGenerator::VisitCountOperation(CountOperation* node) {
5312#ifdef DEBUG
5313 int original_height = frame_->height();
5314#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00005315 Comment cmnt(masm_, "[ CountOperation");
5316
5317 bool is_postfix = node->is_postfix();
5318 bool is_increment = node->op() == Token::INC;
5319
5320 Variable* var = node->expression()->AsVariableProxy()->AsVariable();
5321 bool is_const = (var != NULL && var->mode() == Variable::CONST);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005322 bool is_slot = (var != NULL && var->mode() == Variable::VAR);
Steve Blocka7e24c12009-10-30 11:49:00 +00005323
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005324 if (!is_const && is_slot && type_info(var->slot()).IsSmi()) {
5325 // The type info declares that this variable is always a Smi. That
5326 // means it is a Smi both before and after the increment/decrement.
5327 // Let's make use of that to keep the count operation minimal.
5328 Reference target(this, node->expression(), !is_const);
5329 ASSERT(!target.is_illegal());
5330 target.GetValue(); // Pushes the value.
5331 Register value = frame_->PopToRegister();
5332 if (is_postfix) frame_->EmitPush(value);
5333 if (is_increment) {
5334 __ add(value, value, Operand(Smi::FromInt(1)));
5335 } else {
5336 __ sub(value, value, Operand(Smi::FromInt(1)));
5337 }
5338 frame_->EmitPush(value);
5339 target.SetValue(NOT_CONST_INIT);
5340 if (is_postfix) frame_->Pop();
5341 ASSERT_EQ(original_height + 1, frame_->height());
5342 return;
5343 }
5344
5345 // If it's a postfix expression and its result is not ignored and the
5346 // reference is non-trivial, then push a placeholder on the stack now
5347 // to hold the result of the expression.
5348 bool placeholder_pushed = false;
5349 if (!is_slot && is_postfix) {
Kristian Monsen25f61362010-05-21 11:50:48 +01005350 frame_->EmitPush(Operand(Smi::FromInt(0)));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005351 placeholder_pushed = true;
Steve Blocka7e24c12009-10-30 11:49:00 +00005352 }
5353
Leon Clarked91b9f72010-01-27 17:25:45 +00005354 // A constant reference is never stored to, so it is not a
5355 // compound assignment reference.
5356 { Reference target(this, node->expression(), !is_const);
Steve Blocka7e24c12009-10-30 11:49:00 +00005357 if (target.is_illegal()) {
5358 // Spoof the virtual frame to have the expected height (one higher
5359 // than on entry).
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005360 if (!placeholder_pushed) frame_->EmitPush(Operand(Smi::FromInt(0)));
Steve Block6ded16b2010-05-10 14:33:55 +01005361 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00005362 return;
5363 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005364
Kristian Monsen25f61362010-05-21 11:50:48 +01005365 // This pushes 0, 1 or 2 words onto the stack that are used later when updating
5366 // the target. It also pushes the current value of the target.
Steve Block6ded16b2010-05-10 14:33:55 +01005367 target.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00005368
5369 JumpTarget slow;
5370 JumpTarget exit;
5371
Kristian Monsen25f61362010-05-21 11:50:48 +01005372 Register value = frame_->PopToRegister();
Steve Blocka7e24c12009-10-30 11:49:00 +00005373
5374 // Postfix: Store the old value as the result.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005375 if (placeholder_pushed) {
Kristian Monsen25f61362010-05-21 11:50:48 +01005376 frame_->SetElementAt(value, target.size());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005377 } else if (is_postfix) {
5378 frame_->EmitPush(value);
5379 __ mov(VirtualFrame::scratch0(), value);
5380 value = VirtualFrame::scratch0();
Steve Blocka7e24c12009-10-30 11:49:00 +00005381 }
5382
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005383 // Check for smi operand.
5384 __ tst(value, Operand(kSmiTagMask));
5385 slow.Branch(ne);
5386
Steve Blocka7e24c12009-10-30 11:49:00 +00005387 // Perform optimistic increment/decrement.
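// Adding or subtracting Smi::FromInt(1) with SetCC sets the overflow (V)
// flag exactly when the result leaves the 32-bit signed range, which for
// tagged smis means the untagged result no longer fits in a smi.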
5388 if (is_increment) {
Kristian Monsen25f61362010-05-21 11:50:48 +01005389 __ add(value, value, Operand(Smi::FromInt(1)), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00005390 } else {
Kristian Monsen25f61362010-05-21 11:50:48 +01005391 __ sub(value, value, Operand(Smi::FromInt(1)), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00005392 }
5393
5394 // If the increment/decrement didn't overflow, we're done.
5395 exit.Branch(vc);
5396
5397 // Revert optimistic increment/decrement.
5398 if (is_increment) {
Kristian Monsen25f61362010-05-21 11:50:48 +01005399 __ sub(value, value, Operand(Smi::FromInt(1)));
Steve Blocka7e24c12009-10-30 11:49:00 +00005400 } else {
Kristian Monsen25f61362010-05-21 11:50:48 +01005401 __ add(value, value, Operand(Smi::FromInt(1)));
Steve Blocka7e24c12009-10-30 11:49:00 +00005402 }
5403
Kristian Monsen25f61362010-05-21 11:50:48 +01005404 // Slow case: Convert to number. At this point the
5405 // value to be incremented is in the value register.
Steve Blocka7e24c12009-10-30 11:49:00 +00005406 slow.Bind();
Kristian Monsen25f61362010-05-21 11:50:48 +01005407
5408 // Convert the operand to a number.
5409 frame_->EmitPush(value);
5410
Steve Blocka7e24c12009-10-30 11:49:00 +00005411 {
Kristian Monsen25f61362010-05-21 11:50:48 +01005412 VirtualFrame::SpilledScope spilled(frame_);
Steve Blockd0582a62009-12-15 09:54:21 +00005413 frame_->InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS, 1);
Kristian Monsen25f61362010-05-21 11:50:48 +01005414
5415 if (is_postfix) {
5416 // Postfix: store to result (on the stack).
5417 __ str(r0, frame_->ElementAt(target.size()));
5418 }
5419
5420 // Compute the new value.
5421 frame_->EmitPush(r0);
5422 frame_->EmitPush(Operand(Smi::FromInt(1)));
5423 if (is_increment) {
5424 frame_->CallRuntime(Runtime::kNumberAdd, 2);
5425 } else {
5426 frame_->CallRuntime(Runtime::kNumberSub, 2);
5427 }
Steve Blocka7e24c12009-10-30 11:49:00 +00005428 }
5429
Kristian Monsen25f61362010-05-21 11:50:48 +01005430 __ Move(value, r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00005431 // Store the new value in the target if not const.
Kristian Monsen25f61362010-05-21 11:50:48 +01005432 // At this point the answer is in the value register.
Steve Blocka7e24c12009-10-30 11:49:00 +00005433 exit.Bind();
Kristian Monsen25f61362010-05-21 11:50:48 +01005434 frame_->EmitPush(value);
5435 // Set the target with the result, leaving the result on
5436 // top of the stack. Removes the target from the stack if
5437 // it has a non-zero size.
Steve Blocka7e24c12009-10-30 11:49:00 +00005438 if (!is_const) target.SetValue(NOT_CONST_INIT);
5439 }
5440
5441 // Postfix: Discard the new value and use the old.
Kristian Monsen25f61362010-05-21 11:50:48 +01005442 if (is_postfix) frame_->Pop();
Steve Block6ded16b2010-05-10 14:33:55 +01005443 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00005444}
5445
5446
Steve Block6ded16b2010-05-10 14:33:55 +01005447void CodeGenerator::GenerateLogicalBooleanOperation(BinaryOperation* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005448 // According to ECMA-262 section 11.11, page 58, the binary logical
5449 // operators must yield the result of one of the two expressions
5450 // before any ToBoolean() conversions. This means that the value
5451 // produced by a && or || operator is not necessarily a boolean.
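// For example, 'a && b' evaluates to the value of 'a' when 'a' converts to
// false and to the value of 'b' otherwise, and '0 || "x"' yields the string
// "x", not the boolean true.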
5452
5453 // NOTE: If the left hand side produces a materialized value (not in
5454 // the CC register), we force the right hand side to do the
5455 // same. This is necessary because we may have to branch to the exit
5456 // after evaluating the left hand side (due to the shortcut
5457 // semantics), but the compiler must (statically) know if the result
5458 // of compiling the binary operation is materialized or not.
Leon Clarkef7060e22010-06-03 12:02:55 +01005459 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Block6ded16b2010-05-10 14:33:55 +01005460 if (node->op() == Token::AND) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005461 JumpTarget is_true;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005462 LoadCondition(node->left(), &is_true, false_target(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00005463 if (has_valid_frame() && !has_cc()) {
5464 // The left-hand side result is on top of the virtual frame.
5465 JumpTarget pop_and_continue;
5466 JumpTarget exit;
5467
Leon Clarkef7060e22010-06-03 12:02:55 +01005468 frame_->Dup();
Steve Blocka7e24c12009-10-30 11:49:00 +00005469 // Avoid popping the result if it converts to 'false' using the
5470 // standard ToBoolean() conversion as described in ECMA-262,
5471 // section 9.2, page 30.
5472 ToBoolean(&pop_and_continue, &exit);
5473 Branch(false, &exit);
5474
5475 // Pop the result of evaluating the first part.
5476 pop_and_continue.Bind();
Leon Clarkef7060e22010-06-03 12:02:55 +01005477 frame_->Pop();
Steve Blocka7e24c12009-10-30 11:49:00 +00005478
5479 // Evaluate right side expression.
5480 is_true.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005481 Load(node->right());
Steve Blocka7e24c12009-10-30 11:49:00 +00005482
5483 // Exit (always with a materialized value).
5484 exit.Bind();
5485 } else if (has_cc() || is_true.is_linked()) {
5486 // The left-hand side is either (a) partially compiled to
5487 // control flow with a final branch left to emit or (b) fully
5488 // compiled to control flow and possibly true.
5489 if (has_cc()) {
5490 Branch(false, false_target());
5491 }
5492 is_true.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005493 LoadCondition(node->right(), true_target(), false_target(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00005494 } else {
5495 // Nothing to do.
5496 ASSERT(!has_valid_frame() && !has_cc() && !is_true.is_linked());
5497 }
5498
Steve Block6ded16b2010-05-10 14:33:55 +01005499 } else {
5500 ASSERT(node->op() == Token::OR);
Steve Blocka7e24c12009-10-30 11:49:00 +00005501 JumpTarget is_false;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005502 LoadCondition(node->left(), true_target(), &is_false, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00005503 if (has_valid_frame() && !has_cc()) {
5504 // The left-hand side result is on top of the virtual frame.
5505 JumpTarget pop_and_continue;
5506 JumpTarget exit;
5507
Leon Clarkef7060e22010-06-03 12:02:55 +01005508 frame_->Dup();
Steve Blocka7e24c12009-10-30 11:49:00 +00005509 // Avoid popping the result if it converts to 'true' using the
5510 // standard ToBoolean() conversion as described in ECMA-262,
5511 // section 9.2, page 30.
5512 ToBoolean(&exit, &pop_and_continue);
5513 Branch(true, &exit);
5514
5515 // Pop the result of evaluating the first part.
5516 pop_and_continue.Bind();
Leon Clarkef7060e22010-06-03 12:02:55 +01005517 frame_->Pop();
Steve Blocka7e24c12009-10-30 11:49:00 +00005518
5519 // Evaluate right side expression.
5520 is_false.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005521 Load(node->right());
Steve Blocka7e24c12009-10-30 11:49:00 +00005522
5523 // Exit (always with a materialized value).
5524 exit.Bind();
5525 } else if (has_cc() || is_false.is_linked()) {
5526 // The left-hand side is either (a) partially compiled to
5527 // control flow with a final branch left to emit or (b) fully
5528 // compiled to control flow and possibly false.
5529 if (has_cc()) {
5530 Branch(true, true_target());
5531 }
5532 is_false.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005533 LoadCondition(node->right(), true_target(), false_target(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00005534 } else {
5535 // Nothing to do.
5536 ASSERT(!has_valid_frame() && !has_cc() && !is_false.is_linked());
5537 }
Steve Block6ded16b2010-05-10 14:33:55 +01005538 }
5539}
Steve Blocka7e24c12009-10-30 11:49:00 +00005540
Steve Block6ded16b2010-05-10 14:33:55 +01005541
5542void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
5543#ifdef DEBUG
5544 int original_height = frame_->height();
5545#endif
5546 Comment cmnt(masm_, "[ BinaryOperation");
5547
5548 if (node->op() == Token::AND || node->op() == Token::OR) {
Steve Block6ded16b2010-05-10 14:33:55 +01005549 GenerateLogicalBooleanOperation(node);
Steve Blocka7e24c12009-10-30 11:49:00 +00005550 } else {
5551 // Optimize for the case where (at least) one of the expressions
5552 // is a literal small integer.
5553 Literal* lliteral = node->left()->AsLiteral();
5554 Literal* rliteral = node->right()->AsLiteral();
5555 // NOTE: The code below assumes that the slow cases (calls to runtime)
5556 // never return a constant/immutable object.
5557 bool overwrite_left =
5558 (node->left()->AsBinaryOperation() != NULL &&
5559 node->left()->AsBinaryOperation()->ResultOverwriteAllowed());
5560 bool overwrite_right =
5561 (node->right()->AsBinaryOperation() != NULL &&
5562 node->right()->AsBinaryOperation()->ResultOverwriteAllowed());
5563
5564 if (rliteral != NULL && rliteral->handle()->IsSmi()) {
Steve Block6ded16b2010-05-10 14:33:55 +01005565 VirtualFrame::RegisterAllocationScope scope(this);
5566 Load(node->left());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005567 if (frame_->KnownSmiAt(0)) overwrite_left = false;
Steve Blocka7e24c12009-10-30 11:49:00 +00005568 SmiOperation(node->op(),
5569 rliteral->handle(),
5570 false,
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005571 overwrite_left ? OVERWRITE_LEFT : NO_OVERWRITE);
Steve Blocka7e24c12009-10-30 11:49:00 +00005572 } else if (lliteral != NULL && lliteral->handle()->IsSmi()) {
Steve Block6ded16b2010-05-10 14:33:55 +01005573 VirtualFrame::RegisterAllocationScope scope(this);
5574 Load(node->right());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005575 if (frame_->KnownSmiAt(0)) overwrite_right = false;
Steve Blocka7e24c12009-10-30 11:49:00 +00005576 SmiOperation(node->op(),
5577 lliteral->handle(),
5578 true,
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005579 overwrite_right ? OVERWRITE_RIGHT : NO_OVERWRITE);
Steve Blocka7e24c12009-10-30 11:49:00 +00005580 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005581 GenerateInlineSmi inline_smi =
5582 loop_nesting() > 0 ? GENERATE_INLINE_SMI : DONT_GENERATE_INLINE_SMI;
5583 if (lliteral != NULL) {
5584 ASSERT(!lliteral->handle()->IsSmi());
5585 inline_smi = DONT_GENERATE_INLINE_SMI;
5586 }
5587 if (rliteral != NULL) {
5588 ASSERT(!rliteral->handle()->IsSmi());
5589 inline_smi = DONT_GENERATE_INLINE_SMI;
5590 }
Steve Block6ded16b2010-05-10 14:33:55 +01005591 VirtualFrame::RegisterAllocationScope scope(this);
Steve Blocka7e24c12009-10-30 11:49:00 +00005592 OverwriteMode overwrite_mode = NO_OVERWRITE;
5593 if (overwrite_left) {
5594 overwrite_mode = OVERWRITE_LEFT;
5595 } else if (overwrite_right) {
5596 overwrite_mode = OVERWRITE_RIGHT;
5597 }
Steve Block6ded16b2010-05-10 14:33:55 +01005598 Load(node->left());
5599 Load(node->right());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005600 GenericBinaryOperation(node->op(), overwrite_mode, inline_smi);
Steve Blocka7e24c12009-10-30 11:49:00 +00005601 }
Steve Blocka7e24c12009-10-30 11:49:00 +00005602 }
5603 ASSERT(!has_valid_frame() ||
5604 (has_cc() && frame_->height() == original_height) ||
5605 (!has_cc() && frame_->height() == original_height + 1));
5606}
5607
5608
5609void CodeGenerator::VisitThisFunction(ThisFunction* node) {
5610#ifdef DEBUG
5611 int original_height = frame_->height();
5612#endif
Leon Clarkef7060e22010-06-03 12:02:55 +01005613 frame_->EmitPush(MemOperand(frame_->Function()));
Steve Block6ded16b2010-05-10 14:33:55 +01005614 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00005615}
5616
5617
5618void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
5619#ifdef DEBUG
5620 int original_height = frame_->height();
5621#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00005622 Comment cmnt(masm_, "[ CompareOperation");
5623
Steve Block6ded16b2010-05-10 14:33:55 +01005624 VirtualFrame::RegisterAllocationScope nonspilled_scope(this);
5625
Steve Blocka7e24c12009-10-30 11:49:00 +00005626 // Get the expressions from the node.
5627 Expression* left = node->left();
5628 Expression* right = node->right();
5629 Token::Value op = node->op();
5630
5631 // To make null checks efficient, we check if either left or right is the
5632 // literal 'null'. If so, we optimize the code by inlining a null check
5633 // instead of calling the (very) general runtime routine for checking
5634 // equality.
5635 if (op == Token::EQ || op == Token::EQ_STRICT) {
5636 bool left_is_null =
5637 left->AsLiteral() != NULL && left->AsLiteral()->IsNull();
5638 bool right_is_null =
5639 right->AsLiteral() != NULL && right->AsLiteral()->IsNull();
5640 // The 'null' value can only be equal to 'null' or 'undefined'.
5641 if (left_is_null || right_is_null) {
Steve Block6ded16b2010-05-10 14:33:55 +01005642 Load(left_is_null ? right : left);
5643 Register tos = frame_->PopToRegister();
5644 // JumpTargets can't cope with register allocation yet.
5645 frame_->SpillAll();
Steve Blocka7e24c12009-10-30 11:49:00 +00005646 __ LoadRoot(ip, Heap::kNullValueRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01005647 __ cmp(tos, ip);
Steve Blocka7e24c12009-10-30 11:49:00 +00005648
5649 // The 'null' value is only equal to 'undefined' if using non-strict
5650 // comparisons.
5651 if (op != Token::EQ_STRICT) {
5652 true_target()->Branch(eq);
5653
5654 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01005655 __ cmp(tos, Operand(ip));
Steve Blocka7e24c12009-10-30 11:49:00 +00005656 true_target()->Branch(eq);
5657
Steve Block6ded16b2010-05-10 14:33:55 +01005658 __ tst(tos, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00005659 false_target()->Branch(eq);
5660
5661 // It can be an undetectable object.
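// Undetectable objects (for example the host object document.all in some
// browser embeddings) have a bit set in their map and, under non-strict
// equality, compare equal to both null and undefined.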
Steve Block6ded16b2010-05-10 14:33:55 +01005662 __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
5663 __ ldrb(tos, FieldMemOperand(tos, Map::kBitFieldOffset));
5664 __ and_(tos, tos, Operand(1 << Map::kIsUndetectable));
5665 __ cmp(tos, Operand(1 << Map::kIsUndetectable));
Steve Blocka7e24c12009-10-30 11:49:00 +00005666 }
5667
5668 cc_reg_ = eq;
5669 ASSERT(has_cc() && frame_->height() == original_height);
5670 return;
5671 }
5672 }
5673
5674 // To make typeof testing for natives implemented in JavaScript really
5675 // efficient, we generate special code for expressions of the form:
5676 // 'typeof <expression> == <string>'.
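// The type string is never materialized: the code below checks the operand's
// smi tag, map and instance type directly and leaves the result in the
// condition codes.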
5677 UnaryOperation* operation = left->AsUnaryOperation();
5678 if ((op == Token::EQ || op == Token::EQ_STRICT) &&
5679 (operation != NULL && operation->op() == Token::TYPEOF) &&
5680 (right->AsLiteral() != NULL &&
5681 right->AsLiteral()->handle()->IsString())) {
5682 Handle<String> check(String::cast(*right->AsLiteral()->handle()));
5683
Steve Block6ded16b2010-05-10 14:33:55 +01005684 // Load the operand, move it to a register.
Steve Blocka7e24c12009-10-30 11:49:00 +00005685 LoadTypeofExpression(operation->expression());
Steve Block6ded16b2010-05-10 14:33:55 +01005686 Register tos = frame_->PopToRegister();
5687
5688 // JumpTargets can't cope with register allocation yet.
5689 frame_->SpillAll();
5690
5691 Register scratch = VirtualFrame::scratch0();
Steve Blocka7e24c12009-10-30 11:49:00 +00005692
5693 if (check->Equals(Heap::number_symbol())) {
Steve Block6ded16b2010-05-10 14:33:55 +01005694 __ tst(tos, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00005695 true_target()->Branch(eq);
Steve Block6ded16b2010-05-10 14:33:55 +01005696 __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00005697 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01005698 __ cmp(tos, ip);
Steve Blocka7e24c12009-10-30 11:49:00 +00005699 cc_reg_ = eq;
5700
5701 } else if (check->Equals(Heap::string_symbol())) {
Steve Block6ded16b2010-05-10 14:33:55 +01005702 __ tst(tos, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00005703 false_target()->Branch(eq);
5704
Steve Block6ded16b2010-05-10 14:33:55 +01005705 __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00005706
5707 // It can be an undetectable string object.
Steve Block6ded16b2010-05-10 14:33:55 +01005708 __ ldrb(scratch, FieldMemOperand(tos, Map::kBitFieldOffset));
5709 __ and_(scratch, scratch, Operand(1 << Map::kIsUndetectable));
5710 __ cmp(scratch, Operand(1 << Map::kIsUndetectable));
Steve Blocka7e24c12009-10-30 11:49:00 +00005711 false_target()->Branch(eq);
5712
Steve Block6ded16b2010-05-10 14:33:55 +01005713 __ ldrb(scratch, FieldMemOperand(tos, Map::kInstanceTypeOffset));
5714 __ cmp(scratch, Operand(FIRST_NONSTRING_TYPE));
Steve Blocka7e24c12009-10-30 11:49:00 +00005715 cc_reg_ = lt;
5716
5717 } else if (check->Equals(Heap::boolean_symbol())) {
5718 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01005719 __ cmp(tos, ip);
Steve Blocka7e24c12009-10-30 11:49:00 +00005720 true_target()->Branch(eq);
5721 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01005722 __ cmp(tos, ip);
Steve Blocka7e24c12009-10-30 11:49:00 +00005723 cc_reg_ = eq;
5724
5725 } else if (check->Equals(Heap::undefined_symbol())) {
5726 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01005727 __ cmp(tos, ip);
Steve Blocka7e24c12009-10-30 11:49:00 +00005728 true_target()->Branch(eq);
5729
Steve Block6ded16b2010-05-10 14:33:55 +01005730 __ tst(tos, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00005731 false_target()->Branch(eq);
5732
5733 // It can be an undetectable object.
Steve Block6ded16b2010-05-10 14:33:55 +01005734 __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
5735 __ ldrb(scratch, FieldMemOperand(tos, Map::kBitFieldOffset));
5736 __ and_(scratch, scratch, Operand(1 << Map::kIsUndetectable));
5737 __ cmp(scratch, Operand(1 << Map::kIsUndetectable));
Steve Blocka7e24c12009-10-30 11:49:00 +00005738
5739 cc_reg_ = eq;
5740
5741 } else if (check->Equals(Heap::function_symbol())) {
Steve Block6ded16b2010-05-10 14:33:55 +01005742 __ tst(tos, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00005743 false_target()->Branch(eq);
Steve Block6ded16b2010-05-10 14:33:55 +01005744 Register map_reg = scratch;
5745 __ CompareObjectType(tos, map_reg, tos, JS_FUNCTION_TYPE);
Steve Blockd0582a62009-12-15 09:54:21 +00005746 true_target()->Branch(eq);
5747 // Regular expressions are callable so typeof == 'function'.
Steve Block6ded16b2010-05-10 14:33:55 +01005748 __ CompareInstanceType(map_reg, tos, JS_REGEXP_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00005749 cc_reg_ = eq;
5750
5751 } else if (check->Equals(Heap::object_symbol())) {
Steve Block6ded16b2010-05-10 14:33:55 +01005752 __ tst(tos, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00005753 false_target()->Branch(eq);
5754
Steve Blocka7e24c12009-10-30 11:49:00 +00005755 __ LoadRoot(ip, Heap::kNullValueRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01005756 __ cmp(tos, ip);
Steve Blocka7e24c12009-10-30 11:49:00 +00005757 true_target()->Branch(eq);
5758
Steve Block6ded16b2010-05-10 14:33:55 +01005759 Register map_reg = scratch;
5760 __ CompareObjectType(tos, map_reg, tos, JS_REGEXP_TYPE);
Steve Blockd0582a62009-12-15 09:54:21 +00005761 false_target()->Branch(eq);
5762
Steve Blocka7e24c12009-10-30 11:49:00 +00005763 // It can be an undetectable object.
Steve Block6ded16b2010-05-10 14:33:55 +01005764 __ ldrb(tos, FieldMemOperand(map_reg, Map::kBitFieldOffset));
5765 __ and_(tos, tos, Operand(1 << Map::kIsUndetectable));
5766 __ cmp(tos, Operand(1 << Map::kIsUndetectable));
Steve Blocka7e24c12009-10-30 11:49:00 +00005767 false_target()->Branch(eq);
5768
Steve Block6ded16b2010-05-10 14:33:55 +01005769 __ ldrb(tos, FieldMemOperand(map_reg, Map::kInstanceTypeOffset));
5770 __ cmp(tos, Operand(FIRST_JS_OBJECT_TYPE));
Steve Blocka7e24c12009-10-30 11:49:00 +00005771 false_target()->Branch(lt);
Steve Block6ded16b2010-05-10 14:33:55 +01005772 __ cmp(tos, Operand(LAST_JS_OBJECT_TYPE));
Steve Blocka7e24c12009-10-30 11:49:00 +00005773 cc_reg_ = le;
5774
5775 } else {
5776 // Uncommon case: typeof testing against a string literal that is
5777 // never returned from the typeof operator.
5778 false_target()->Jump();
5779 }
5780 ASSERT(!has_valid_frame() ||
5781 (has_cc() && frame_->height() == original_height));
5782 return;
5783 }
5784
5785 switch (op) {
5786 case Token::EQ:
5787 Comparison(eq, left, right, false);
5788 break;
5789
5790 case Token::LT:
5791 Comparison(lt, left, right);
5792 break;
5793
5794 case Token::GT:
5795 Comparison(gt, left, right);
5796 break;
5797
5798 case Token::LTE:
5799 Comparison(le, left, right);
5800 break;
5801
5802 case Token::GTE:
5803 Comparison(ge, left, right);
5804 break;
5805
5806 case Token::EQ_STRICT:
5807 Comparison(eq, left, right, true);
5808 break;
5809
5810 case Token::IN: {
Steve Block6ded16b2010-05-10 14:33:55 +01005811 VirtualFrame::SpilledScope scope(frame_);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005812 Load(left);
5813 Load(right);
Steve Blockd0582a62009-12-15 09:54:21 +00005814 frame_->InvokeBuiltin(Builtins::IN, CALL_JS, 2);
Steve Blocka7e24c12009-10-30 11:49:00 +00005815 frame_->EmitPush(r0);
5816 break;
5817 }
5818
5819 case Token::INSTANCEOF: {
Steve Block6ded16b2010-05-10 14:33:55 +01005820 VirtualFrame::SpilledScope scope(frame_);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005821 Load(left);
5822 Load(right);
Steve Blocka7e24c12009-10-30 11:49:00 +00005823 InstanceofStub stub;
5824 frame_->CallStub(&stub, 2);
5825 // At this point if instanceof succeeded then r0 == 0.
5826 __ tst(r0, Operand(r0));
5827 cc_reg_ = eq;
5828 break;
5829 }
5830
5831 default:
5832 UNREACHABLE();
5833 }
5834 ASSERT((has_cc() && frame_->height() == original_height) ||
5835 (!has_cc() && frame_->height() == original_height + 1));
5836}
5837
5838
Steve Block6ded16b2010-05-10 14:33:55 +01005839class DeferredReferenceGetNamedValue: public DeferredCode {
5840 public:
Leon Clarkef7060e22010-06-03 12:02:55 +01005841 explicit DeferredReferenceGetNamedValue(Register receiver,
5842 Handle<String> name)
5843 : receiver_(receiver), name_(name) {
Steve Block6ded16b2010-05-10 14:33:55 +01005844 set_comment("[ DeferredReferenceGetNamedValue");
5845 }
5846
5847 virtual void Generate();
5848
5849 private:
Leon Clarkef7060e22010-06-03 12:02:55 +01005850 Register receiver_;
Steve Block6ded16b2010-05-10 14:33:55 +01005851 Handle<String> name_;
5852};
5853
5854
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005855// Convention for this is that on entry the receiver is in a register that
5856 // is not used by the virtual frame. On exit the answer is found in that same
5857// register and the stack has the same height.
Steve Block6ded16b2010-05-10 14:33:55 +01005858void DeferredReferenceGetNamedValue::Generate() {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005859#ifdef DEBUG
5860 int expected_height = frame_state()->frame()->height();
5861#endif
5862 VirtualFrame copied_frame(*frame_state()->frame());
5863 copied_frame.SpillAll();
Leon Clarkef7060e22010-06-03 12:02:55 +01005864
Steve Block6ded16b2010-05-10 14:33:55 +01005865 Register scratch1 = VirtualFrame::scratch0();
5866 Register scratch2 = VirtualFrame::scratch1();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005867 ASSERT(!receiver_.is(scratch1) && !receiver_.is(scratch2));
Steve Block6ded16b2010-05-10 14:33:55 +01005868 __ DecrementCounter(&Counters::named_load_inline, 1, scratch1, scratch2);
5869 __ IncrementCounter(&Counters::named_load_inline_miss, 1, scratch1, scratch2);
5870
Leon Clarkef7060e22010-06-03 12:02:55 +01005871 // Ensure receiver in r0 and name in r2 to match load ic calling convention.
5872 __ Move(r0, receiver_);
Steve Block6ded16b2010-05-10 14:33:55 +01005873 __ mov(r2, Operand(name_));
5874
5875 // The rest of the instructions in the deferred code must be together.
5876 { Assembler::BlockConstPoolScope block_const_pool(masm_);
5877 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
5878 __ Call(ic, RelocInfo::CODE_TARGET);
5879 // The call must be followed by a nop(1) instruction to indicate that the
5880 // in-object has been inlined.
5881 __ nop(PROPERTY_ACCESS_INLINED);
5882
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005883 // At this point the answer is in r0. We move it to the expected register
5884 // if necessary.
5885 __ Move(receiver_, r0);
5886
5887 // Now go back to the frame that we entered with. This will not overwrite
5888 // the receiver register since that register was not in use when we came
5889 // in. The instructions emitted by this merge are skipped over by the
5890 // inline load patching mechanism when looking for the branch instruction
5891 // that tells it where the code to patch is.
5892 copied_frame.MergeTo(frame_state()->frame());
5893
Steve Block6ded16b2010-05-10 14:33:55 +01005894 // Block the constant pool for one more instruction after leaving this
5895 // constant pool block scope to include the branch instruction ending the
5896 // deferred code.
5897 __ BlockConstPoolFor(1);
5898 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005899 ASSERT_EQ(expected_height, frame_state()->frame()->height());
Steve Block6ded16b2010-05-10 14:33:55 +01005900}
5901
5902
5903class DeferredReferenceGetKeyedValue: public DeferredCode {
5904 public:
Kristian Monsen25f61362010-05-21 11:50:48 +01005905 DeferredReferenceGetKeyedValue(Register key, Register receiver)
5906 : key_(key), receiver_(receiver) {
Steve Block6ded16b2010-05-10 14:33:55 +01005907 set_comment("[ DeferredReferenceGetKeyedValue");
5908 }
5909
5910 virtual void Generate();
Kristian Monsen25f61362010-05-21 11:50:48 +01005911
5912 private:
5913 Register key_;
5914 Register receiver_;
Steve Block6ded16b2010-05-10 14:33:55 +01005915};
5916
5917
5918void DeferredReferenceGetKeyedValue::Generate() {
Kristian Monsen25f61362010-05-21 11:50:48 +01005919 ASSERT((key_.is(r0) && receiver_.is(r1)) ||
5920 (key_.is(r1) && receiver_.is(r0)));
5921
Steve Block6ded16b2010-05-10 14:33:55 +01005922 Register scratch1 = VirtualFrame::scratch0();
5923 Register scratch2 = VirtualFrame::scratch1();
5924 __ DecrementCounter(&Counters::keyed_load_inline, 1, scratch1, scratch2);
5925 __ IncrementCounter(&Counters::keyed_load_inline_miss, 1, scratch1, scratch2);
5926
Kristian Monsen25f61362010-05-21 11:50:48 +01005927 // Ensure key in r0 and receiver in r1 to match keyed load ic calling
5928 // convention.
5929 if (key_.is(r1)) {
5930 __ Swap(r0, r1, ip);
5931 }
5932
Steve Block6ded16b2010-05-10 14:33:55 +01005933 // The rest of the instructions in the deferred code must be together.
5934 { Assembler::BlockConstPoolScope block_const_pool(masm_);
Kristian Monsen25f61362010-05-21 11:50:48 +01005935 // Call keyed load IC. It has the arguments key and receiver in r0 and r1.
Steve Block6ded16b2010-05-10 14:33:55 +01005936 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
5937 __ Call(ic, RelocInfo::CODE_TARGET);
5938 // The call must be followed by a nop instruction to indicate that the
5939 // keyed load has been inlined.
5940 __ nop(PROPERTY_ACCESS_INLINED);
5941
5942 // Block the constant pool for one more instruction after leaving this
5943 // constant pool block scope to include the branch instruction ending the
5944 // deferred code.
5945 __ BlockConstPoolFor(1);
5946 }
5947}
5948
5949
5950class DeferredReferenceSetKeyedValue: public DeferredCode {
5951 public:
Leon Clarkef7060e22010-06-03 12:02:55 +01005952 DeferredReferenceSetKeyedValue(Register value,
5953 Register key,
5954 Register receiver)
5955 : value_(value), key_(key), receiver_(receiver) {
Steve Block6ded16b2010-05-10 14:33:55 +01005956 set_comment("[ DeferredReferenceSetKeyedValue");
5957 }
5958
5959 virtual void Generate();
Leon Clarkef7060e22010-06-03 12:02:55 +01005960
5961 private:
5962 Register value_;
5963 Register key_;
5964 Register receiver_;
Steve Block6ded16b2010-05-10 14:33:55 +01005965};
5966
5967
5968void DeferredReferenceSetKeyedValue::Generate() {
5969 Register scratch1 = VirtualFrame::scratch0();
5970 Register scratch2 = VirtualFrame::scratch1();
5971 __ DecrementCounter(&Counters::keyed_store_inline, 1, scratch1, scratch2);
5972 __ IncrementCounter(
5973 &Counters::keyed_store_inline_miss, 1, scratch1, scratch2);
5974
Leon Clarkef7060e22010-06-03 12:02:55 +01005975 // Ensure value in r0, key in r1 and receiver in r2 to match keyed store ic
5976 // calling convention.
5977 if (value_.is(r1)) {
5978 __ Swap(r0, r1, ip);
5979 }
5980 ASSERT(receiver_.is(r2));
5981
Steve Block6ded16b2010-05-10 14:33:55 +01005982 // The rest of the instructions in the deferred code must be together.
5983 { Assembler::BlockConstPoolScope block_const_pool(masm_);
Leon Clarkef7060e22010-06-03 12:02:55 +01005984 // Call keyed store IC. It has the arguments value, key and receiver in r0,
5985 // r1 and r2.
Steve Block6ded16b2010-05-10 14:33:55 +01005986 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
5987 __ Call(ic, RelocInfo::CODE_TARGET);
5988 // The call must be followed by a nop instruction to indicate that the
5989 // keyed store has been inlined.
5990 __ nop(PROPERTY_ACCESS_INLINED);
5991
5992 // Block the constant pool for one more instruction after leaving this
5993 // constant pool block scope to include the branch instruction ending the
5994 // deferred code.
5995 __ BlockConstPoolFor(1);
5996 }
5997}
5998
5999
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006000// Consumes the top of stack (the receiver) and pushes the result instead.
Steve Block6ded16b2010-05-10 14:33:55 +01006001void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
6002 if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) {
6003 Comment cmnt(masm(), "[ Load from named Property");
6004 // Setup the name register and call load IC.
6005 frame_->CallLoadIC(name,
6006 is_contextual
6007 ? RelocInfo::CODE_TARGET_CONTEXT
6008 : RelocInfo::CODE_TARGET);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006009 frame_->EmitPush(r0); // Push answer.
Steve Block6ded16b2010-05-10 14:33:55 +01006010 } else {
6011 // Inline the in-object property case.
6012 Comment cmnt(masm(), "[ Inlined named property load");
6013
6014 // Counter will be decremented in the deferred code. Placed here to avoid
6015 // having it in the instruction stream below where patching will occur.
6016 __ IncrementCounter(&Counters::named_load_inline, 1,
6017 frame_->scratch0(), frame_->scratch1());
6018
6019 // The following instructions are the inlined load of an in-object property.
6020 // Parts of this code is patched, so the exact instructions generated needs
6021 // to be fixed. Therefore the instruction pool is blocked when generating
6022 // this code
6023
6024 // Load the receiver from the stack.
Leon Clarkef7060e22010-06-03 12:02:55 +01006025 Register receiver = frame_->PopToRegister();
Steve Block6ded16b2010-05-10 14:33:55 +01006026
6027 DeferredReferenceGetNamedValue* deferred =
Leon Clarkef7060e22010-06-03 12:02:55 +01006028 new DeferredReferenceGetNamedValue(receiver, name);
Steve Block6ded16b2010-05-10 14:33:55 +01006029
6030#ifdef DEBUG
6031 int kInlinedNamedLoadInstructions = 7;
6032 Label check_inlined_codesize;
6033 masm_->bind(&check_inlined_codesize);
6034#endif
6035
6036 { Assembler::BlockConstPoolScope block_const_pool(masm_);
6037 // Check that the receiver is a heap object.
Leon Clarkef7060e22010-06-03 12:02:55 +01006038 __ tst(receiver, Operand(kSmiTagMask));
Steve Block6ded16b2010-05-10 14:33:55 +01006039 deferred->Branch(eq);
6040
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006041 Register scratch = VirtualFrame::scratch0();
6042 Register scratch2 = VirtualFrame::scratch1();
6043
Steve Block6ded16b2010-05-10 14:33:55 +01006044 // Check the map. The null map used below is patched by the inline cache
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006045 // code. Therefore we can't use a LoadRoot call.
6046 __ ldr(scratch, FieldMemOperand(receiver, HeapObject::kMapOffset));
6047 __ mov(scratch2, Operand(Factory::null_value()));
6048 __ cmp(scratch, scratch2);
Steve Block6ded16b2010-05-10 14:33:55 +01006049 deferred->Branch(ne);
6050
6051 // Initially use an invalid index. The index will be patched by the
6052 // inline cache code.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006053 __ ldr(receiver, MemOperand(receiver, 0));
Steve Block6ded16b2010-05-10 14:33:55 +01006054
6055 // Make sure that the expected number of instructions are generated.
6056 ASSERT_EQ(kInlinedNamedLoadInstructions,
6057 masm_->InstructionsGeneratedSince(&check_inlined_codesize));
6058 }
6059
6060 deferred->BindExit();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006061 // At this point the receiver register has the result, either from the
6062 // deferred code or from the inlined code.
6063 frame_->EmitPush(receiver);
Steve Block6ded16b2010-05-10 14:33:55 +01006064 }
6065}
6066
6067
6068void CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
6069#ifdef DEBUG
6070 int expected_height = frame_->height() - (is_contextual ? 1 : 2);
6071#endif
6072 frame_->CallStoreIC(name, is_contextual);
6073
6074 ASSERT_EQ(expected_height, frame_->height());
6075}
6076
6077
6078void CodeGenerator::EmitKeyedLoad() {
6079 if (loop_nesting() == 0) {
6080 Comment cmnt(masm_, "[ Load from keyed property");
6081 frame_->CallKeyedLoadIC();
6082 } else {
6083 // Inline the keyed load.
6084 Comment cmnt(masm_, "[ Inlined load from keyed property");
6085
6086 // Counter will be decremented in the deferred code. Placed here to avoid
6087 // having it in the instruction stream below where patching will occur.
6088 __ IncrementCounter(&Counters::keyed_load_inline, 1,
6089 frame_->scratch0(), frame_->scratch1());
6090
Kristian Monsen25f61362010-05-21 11:50:48 +01006091 // Load the key and receiver from the stack.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006092 bool key_is_known_smi = frame_->KnownSmiAt(0);
Kristian Monsen25f61362010-05-21 11:50:48 +01006093 Register key = frame_->PopToRegister();
6094 Register receiver = frame_->PopToRegister(key);
Steve Block6ded16b2010-05-10 14:33:55 +01006095 VirtualFrame::SpilledScope spilled(frame_);
6096
Kristian Monsen25f61362010-05-21 11:50:48 +01006097 // The deferred code expects key and receiver in registers.
Steve Block6ded16b2010-05-10 14:33:55 +01006098 DeferredReferenceGetKeyedValue* deferred =
Kristian Monsen25f61362010-05-21 11:50:48 +01006099 new DeferredReferenceGetKeyedValue(key, receiver);
Steve Block6ded16b2010-05-10 14:33:55 +01006100
6101 // Check that the receiver is a heap object.
6102 __ tst(receiver, Operand(kSmiTagMask));
6103 deferred->Branch(eq);
6104
6105 // The following instructions are part of the inlined load keyed
6106 // property code which can be patched. Therefore the exact number of
6107 // instructions generated needs to be fixed, so the constant pool is blocked
6108 // while generating this code.
Steve Block6ded16b2010-05-10 14:33:55 +01006109 { Assembler::BlockConstPoolScope block_const_pool(masm_);
6110 Register scratch1 = VirtualFrame::scratch0();
6111 Register scratch2 = VirtualFrame::scratch1();
6112 // Check the map. The null map used below is patched by the inline cache
6113 // code.
6114 __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006115
6116 // Check that the key is a smi.
6117 if (!key_is_known_smi) {
6118 __ tst(key, Operand(kSmiTagMask));
6119 deferred->Branch(ne);
6120 }
6121
Kristian Monsen25f61362010-05-21 11:50:48 +01006122#ifdef DEBUG
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006123 Label check_inlined_codesize;
6124 masm_->bind(&check_inlined_codesize);
Kristian Monsen25f61362010-05-21 11:50:48 +01006125#endif
Steve Block6ded16b2010-05-10 14:33:55 +01006126 __ mov(scratch2, Operand(Factory::null_value()));
6127 __ cmp(scratch1, scratch2);
6128 deferred->Branch(ne);
6129
Steve Block6ded16b2010-05-10 14:33:55 +01006130 // Get the elements array from the receiver and check that it
6131 // is not a dictionary.
6132 __ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset));
6133 __ ldr(scratch2, FieldMemOperand(scratch1, JSObject::kMapOffset));
6134 __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
6135 __ cmp(scratch2, ip);
6136 deferred->Branch(ne);
6137
6138 // Check that key is within bounds. Use unsigned comparison to handle
6139 // negative keys.
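// A negative smi key has the sign bit set, so as an unsigned number it is
// larger than any valid array length and fails the check below.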
6140 __ ldr(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006141 __ cmp(scratch2, key);
Steve Block6ded16b2010-05-10 14:33:55 +01006142 deferred->Branch(ls); // Unsigned less equal.
6143
6144 // Load and check that the result is not the hole (key is a smi).
6145 __ LoadRoot(scratch2, Heap::kTheHoleValueRootIndex);
6146 __ add(scratch1,
6147 scratch1,
6148 Operand(FixedArray::kHeaderSize - kHeapObjectTag));
Kristian Monsen25f61362010-05-21 11:50:48 +01006149 __ ldr(scratch1,
Steve Block6ded16b2010-05-10 14:33:55 +01006150 MemOperand(scratch1, key, LSL,
6151 kPointerSizeLog2 - (kSmiTagSize + kSmiShiftSize)));
Kristian Monsen25f61362010-05-21 11:50:48 +01006152 __ cmp(scratch1, scratch2);
Steve Block6ded16b2010-05-10 14:33:55 +01006153 deferred->Branch(eq);
6154
Kristian Monsen25f61362010-05-21 11:50:48 +01006155 __ mov(r0, scratch1);
Steve Block6ded16b2010-05-10 14:33:55 +01006156 // Make sure that the expected number of instructions are generated.
Leon Clarkef7060e22010-06-03 12:02:55 +01006157 ASSERT_EQ(kInlinedKeyedLoadInstructionsAfterPatch,
Steve Block6ded16b2010-05-10 14:33:55 +01006158 masm_->InstructionsGeneratedSince(&check_inlined_codesize));
6159 }
6160
6161 deferred->BindExit();
6162 }
6163}
6164
6165
6166void CodeGenerator::EmitKeyedStore(StaticType* key_type) {
Steve Block6ded16b2010-05-10 14:33:55 +01006167 // Generate inlined version of the keyed store if the code is in a loop
6168 // and the key is likely to be a smi.
6169 if (loop_nesting() > 0 && key_type->IsLikelySmi()) {
6170 // Inline the keyed store.
6171 Comment cmnt(masm_, "[ Inlined store to keyed property");
6172
Leon Clarkef7060e22010-06-03 12:02:55 +01006173 Register scratch1 = VirtualFrame::scratch0();
6174 Register scratch2 = VirtualFrame::scratch1();
6175 Register scratch3 = r3;
Steve Block6ded16b2010-05-10 14:33:55 +01006176
6177 // Counter will be decremented in the deferred code. Placed here to avoid
6178 // having it in the instruction stream below where patching will occur.
6179 __ IncrementCounter(&Counters::keyed_store_inline, 1,
Leon Clarkef7060e22010-06-03 12:02:55 +01006180 scratch1, scratch2);
6181
6182 // Load the value, key and receiver from the stack.
6183 Register value = frame_->PopToRegister();
6184 Register key = frame_->PopToRegister(value);
6185 Register receiver = r2;
6186 frame_->EmitPop(receiver);
6187 VirtualFrame::SpilledScope spilled(frame_);
6188
6189 // The deferred code expects value, key and receiver in registers.
6190 DeferredReferenceSetKeyedValue* deferred =
6191 new DeferredReferenceSetKeyedValue(value, key, receiver);
Steve Block6ded16b2010-05-10 14:33:55 +01006192
6193 // Check that the value is a smi. As this inlined code does not set the
6194 // write barrier it is only possible to store smi values.
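// (Storing a heap object pointer would require recording the write so the
// garbage collector can later find the reference; smis are immediate values,
// not pointers, so no write barrier is needed for them.)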
Leon Clarkef7060e22010-06-03 12:02:55 +01006195 __ tst(value, Operand(kSmiTagMask));
Steve Block6ded16b2010-05-10 14:33:55 +01006196 deferred->Branch(ne);
6197
Steve Block6ded16b2010-05-10 14:33:55 +01006198 // Check that the key is a smi.
Leon Clarkef7060e22010-06-03 12:02:55 +01006199 __ tst(key, Operand(kSmiTagMask));
Steve Block6ded16b2010-05-10 14:33:55 +01006200 deferred->Branch(ne);
6201
6202 // Check that the receiver is a heap object.
Leon Clarkef7060e22010-06-03 12:02:55 +01006203 __ tst(receiver, Operand(kSmiTagMask));
Steve Block6ded16b2010-05-10 14:33:55 +01006204 deferred->Branch(eq);
6205
6206 // Check that the receiver is a JSArray.
Leon Clarkef7060e22010-06-03 12:02:55 +01006207 __ CompareObjectType(receiver, scratch1, scratch1, JS_ARRAY_TYPE);
Steve Block6ded16b2010-05-10 14:33:55 +01006208 deferred->Branch(ne);
6209
6210 // Check that the key is within bounds. Both the key and the length of
6211 // the JSArray are smis. Use unsigned comparison to handle negative keys.
Leon Clarkef7060e22010-06-03 12:02:55 +01006212 __ ldr(scratch1, FieldMemOperand(receiver, JSArray::kLengthOffset));
6213 __ cmp(scratch1, key);
Steve Block6ded16b2010-05-10 14:33:55 +01006214 deferred->Branch(ls); // Unsigned less equal.
6215
6216 // The following instructions are part of the inlined store keyed
6217 // property code which can be patched. Therefore the exact number of
6218 // instructions generated needs to be fixed, so the constant pool is blocked
6219 // while generating this code.
Steve Block6ded16b2010-05-10 14:33:55 +01006220 { Assembler::BlockConstPoolScope block_const_pool(masm_);
6221 // Get the elements array from the receiver and check that it
6222 // is not a dictionary.
Leon Clarkef7060e22010-06-03 12:02:55 +01006223 __ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset));
6224 __ ldr(scratch2, FieldMemOperand(scratch1, JSObject::kMapOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01006225 // Read the fixed array map from the constant pool (not from the root
6226 // array) so that the value can be patched. When debugging, we patch this
6227 // comparison to always fail so that we will hit the IC call in the
6228 // deferred code which will allow the debugger to break for fast case
6229 // stores.
Leon Clarkef7060e22010-06-03 12:02:55 +01006230#ifdef DEBUG
6231 Label check_inlined_codesize;
6232 masm_->bind(&check_inlined_codesize);
6233#endif
6234 __ mov(scratch3, Operand(Factory::fixed_array_map()));
6235 __ cmp(scratch2, scratch3);
Steve Block6ded16b2010-05-10 14:33:55 +01006236 deferred->Branch(ne);
6237
6238 // Store the value.
Leon Clarkef7060e22010-06-03 12:02:55 +01006239 __ add(scratch1, scratch1,
6240 Operand(FixedArray::kHeaderSize - kHeapObjectTag));
6241 __ str(value,
6242 MemOperand(scratch1, key, LSL,
6243 kPointerSizeLog2 - (kSmiTagSize + kSmiShiftSize)));
Steve Block6ded16b2010-05-10 14:33:55 +01006244
6245 // Make sure that the expected number of instructions are generated.
Leon Clarkef7060e22010-06-03 12:02:55 +01006246 ASSERT_EQ(kInlinedKeyedStoreInstructionsAfterPatch,
Steve Block6ded16b2010-05-10 14:33:55 +01006247 masm_->InstructionsGeneratedSince(&check_inlined_codesize));
6248 }
6249
6250 deferred->BindExit();
6251 } else {
6252 frame()->CallKeyedStoreIC();
6253 }
Leon Clarked91b9f72010-01-27 17:25:45 +00006254}
6255
6256
Steve Blocka7e24c12009-10-30 11:49:00 +00006257#ifdef DEBUG
6258bool CodeGenerator::HasValidEntryRegisters() { return true; }
6259#endif
6260
6261
6262#undef __
6263#define __ ACCESS_MASM(masm)
6264
6265
6266Handle<String> Reference::GetName() {
6267 ASSERT(type_ == NAMED);
6268 Property* property = expression_->AsProperty();
6269 if (property == NULL) {
6270 // Global variable reference treated as a named property reference.
6271 VariableProxy* proxy = expression_->AsVariableProxy();
6272 ASSERT(proxy->AsVariable() != NULL);
6273 ASSERT(proxy->AsVariable()->is_global());
6274 return proxy->name();
6275 } else {
6276 Literal* raw_name = property->key()->AsLiteral();
6277 ASSERT(raw_name != NULL);
6278 return Handle<String>(String::cast(*raw_name->handle()));
6279 }
6280}
6281
6282
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006283void Reference::DupIfPersist() {
6284 if (persist_after_get_) {
6285 switch (type_) {
6286 case KEYED:
6287 cgen_->frame()->Dup2();
6288 break;
6289 case NAMED:
6290 cgen_->frame()->Dup();
6291 // Fall through.
6292 case UNLOADED:
6293 case ILLEGAL:
6294 case SLOT:
6295 // Do nothing.
6296 ;
6297 }
6298 } else {
6299 set_unloaded();
6300 }
6301}
6302
6303
Steve Blockd0582a62009-12-15 09:54:21 +00006304void Reference::GetValue() {
Steve Blocka7e24c12009-10-30 11:49:00 +00006305 ASSERT(cgen_->HasValidEntryRegisters());
6306 ASSERT(!is_illegal());
6307 ASSERT(!cgen_->has_cc());
6308 MacroAssembler* masm = cgen_->masm();
6309 Property* property = expression_->AsProperty();
6310 if (property != NULL) {
6311 cgen_->CodeForSourcePosition(property->position());
6312 }
6313
6314 switch (type_) {
6315 case SLOT: {
6316 Comment cmnt(masm, "[ Load from Slot");
6317 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
6318 ASSERT(slot != NULL);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006319 DupIfPersist();
Steve Block6ded16b2010-05-10 14:33:55 +01006320 cgen_->LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
Steve Blocka7e24c12009-10-30 11:49:00 +00006321 break;
6322 }
6323
6324 case NAMED: {
Steve Blocka7e24c12009-10-30 11:49:00 +00006325 Variable* var = expression_->AsVariableProxy()->AsVariable();
Steve Block6ded16b2010-05-10 14:33:55 +01006326 bool is_global = var != NULL;
6327 ASSERT(!is_global || var->is_global());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006328 Handle<String> name = GetName();
6329 DupIfPersist();
6330 cgen_->EmitNamedLoad(name, is_global);
Steve Blocka7e24c12009-10-30 11:49:00 +00006331 break;
6332 }
6333
6334 case KEYED: {
Leon Clarkef7060e22010-06-03 12:02:55 +01006335 ASSERT(property != NULL);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006336 DupIfPersist();
Steve Block6ded16b2010-05-10 14:33:55 +01006337 cgen_->EmitKeyedLoad();
Leon Clarked91b9f72010-01-27 17:25:45 +00006338 cgen_->frame()->EmitPush(r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00006339 break;
6340 }
6341
6342 default:
6343 UNREACHABLE();
6344 }
6345}
6346
6347
6348void Reference::SetValue(InitState init_state) {
6349 ASSERT(!is_illegal());
6350 ASSERT(!cgen_->has_cc());
6351 MacroAssembler* masm = cgen_->masm();
6352 VirtualFrame* frame = cgen_->frame();
6353 Property* property = expression_->AsProperty();
6354 if (property != NULL) {
6355 cgen_->CodeForSourcePosition(property->position());
6356 }
6357
6358 switch (type_) {
6359 case SLOT: {
6360 Comment cmnt(masm, "[ Store to Slot");
6361 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
Leon Clarkee46be812010-01-19 14:06:41 +00006362 cgen_->StoreToSlot(slot, init_state);
Steve Block6ded16b2010-05-10 14:33:55 +01006363 set_unloaded();
Steve Blocka7e24c12009-10-30 11:49:00 +00006364 break;
6365 }
6366
6367 case NAMED: {
6368 Comment cmnt(masm, "[ Store to named Property");
Steve Block6ded16b2010-05-10 14:33:55 +01006369 cgen_->EmitNamedStore(GetName(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00006370 frame->EmitPush(r0);
Andrei Popescu402d9372010-02-26 13:31:12 +00006371 set_unloaded();
Steve Blocka7e24c12009-10-30 11:49:00 +00006372 break;
6373 }
6374
6375 case KEYED: {
6376 Comment cmnt(masm, "[ Store to keyed Property");
6377 Property* property = expression_->AsProperty();
6378 ASSERT(property != NULL);
6379 cgen_->CodeForSourcePosition(property->position());
Steve Block6ded16b2010-05-10 14:33:55 +01006380 cgen_->EmitKeyedStore(property->key()->type());
Steve Blocka7e24c12009-10-30 11:49:00 +00006381 frame->EmitPush(r0);
Leon Clarkef7060e22010-06-03 12:02:55 +01006382 set_unloaded();
Steve Blocka7e24c12009-10-30 11:49:00 +00006383 break;
6384 }
6385
6386 default:
6387 UNREACHABLE();
6388 }
6389}
6390
6391
Leon Clarkee46be812010-01-19 14:06:41 +00006392void FastNewClosureStub::Generate(MacroAssembler* masm) {
Steve Block6ded16b2010-05-10 14:33:55 +01006393 // Create a new closure from the given function info in new
6394 // space. Set the context to the current context in cp.
Leon Clarkee46be812010-01-19 14:06:41 +00006395 Label gc;
6396
Steve Block6ded16b2010-05-10 14:33:55 +01006397 // Pop the function info from the stack.
Leon Clarkee46be812010-01-19 14:06:41 +00006398 __ pop(r3);
6399
6400 // Attempt to allocate new JSFunction in new space.
Kristian Monsen25f61362010-05-21 11:50:48 +01006401 __ AllocateInNewSpace(JSFunction::kSize,
Leon Clarkee46be812010-01-19 14:06:41 +00006402 r0,
6403 r1,
6404 r2,
6405 &gc,
6406 TAG_OBJECT);
6407
6408 // Compute the function map in the current global context and set that
6409 // as the map of the allocated object.
6410 __ ldr(r2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
6411 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset));
6412 __ ldr(r2, MemOperand(r2, Context::SlotOffset(Context::FUNCTION_MAP_INDEX)));
6413 __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
6414
Steve Block6ded16b2010-05-10 14:33:55 +01006415 // Initialize the rest of the function. We don't have to update the
6416 // write barrier because the allocated object is in new space.
6417 __ LoadRoot(r1, Heap::kEmptyFixedArrayRootIndex);
6418 __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
6419 __ str(r1, FieldMemOperand(r0, JSObject::kPropertiesOffset));
6420 __ str(r1, FieldMemOperand(r0, JSObject::kElementsOffset));
6421 __ str(r2, FieldMemOperand(r0, JSFunction::kPrototypeOrInitialMapOffset));
6422 __ str(r3, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
6423 __ str(cp, FieldMemOperand(r0, JSFunction::kContextOffset));
6424 __ str(r1, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00006425
Steve Block6ded16b2010-05-10 14:33:55 +01006426 // Return result. The argument function info has been popped already.
Leon Clarkee46be812010-01-19 14:06:41 +00006427 __ Ret();
6428
6429 // Create a new closure through the slower runtime call.
6430 __ bind(&gc);
Steve Block6ded16b2010-05-10 14:33:55 +01006431 __ Push(cp, r3);
6432 __ TailCallRuntime(Runtime::kNewClosure, 2, 1);
Leon Clarkee46be812010-01-19 14:06:41 +00006433}
6434
6435
6436void FastNewContextStub::Generate(MacroAssembler* masm) {
6437 // Try to allocate the context in new space.
6438 Label gc;
6439 int length = slots_ + Context::MIN_CONTEXT_SLOTS;
6440
6441 // Attempt to allocate the context in new space.
Kristian Monsen25f61362010-05-21 11:50:48 +01006442 __ AllocateInNewSpace(FixedArray::SizeFor(length),
Leon Clarkee46be812010-01-19 14:06:41 +00006443 r0,
6444 r1,
6445 r2,
6446 &gc,
6447 TAG_OBJECT);
6448
6449 // Load the function from the stack.
Andrei Popescu402d9372010-02-26 13:31:12 +00006450 __ ldr(r3, MemOperand(sp, 0));
Leon Clarkee46be812010-01-19 14:06:41 +00006451
6452 // Setup the object header.
6453 __ LoadRoot(r2, Heap::kContextMapRootIndex);
6454 __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006455 __ mov(r2, Operand(Smi::FromInt(length)));
6456 __ str(r2, FieldMemOperand(r0, FixedArray::kLengthOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00006457
6458 // Setup the fixed slots.
6459 __ mov(r1, Operand(Smi::FromInt(0)));
6460 __ str(r3, MemOperand(r0, Context::SlotOffset(Context::CLOSURE_INDEX)));
6461 __ str(r0, MemOperand(r0, Context::SlotOffset(Context::FCONTEXT_INDEX)));
6462 __ str(r1, MemOperand(r0, Context::SlotOffset(Context::PREVIOUS_INDEX)));
6463 __ str(r1, MemOperand(r0, Context::SlotOffset(Context::EXTENSION_INDEX)));
6464
6465 // Copy the global object from the surrounding context.
6466 __ ldr(r1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
6467 __ str(r1, MemOperand(r0, Context::SlotOffset(Context::GLOBAL_INDEX)));
6468
6469 // Initialize the rest of the slots to undefined.
6470 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
6471 for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
6472 __ str(r1, MemOperand(r0, Context::SlotOffset(i)));
6473 }
6474
6475 // Remove the on-stack argument and return.
6476 __ mov(cp, r0);
6477 __ pop();
6478 __ Ret();
6479
6480 // Need to collect. Call into runtime system.
6481 __ bind(&gc);
Steve Block6ded16b2010-05-10 14:33:55 +01006482 __ TailCallRuntime(Runtime::kNewContext, 1, 1);
Leon Clarkee46be812010-01-19 14:06:41 +00006483}
6484
6485
Andrei Popescu402d9372010-02-26 13:31:12 +00006486void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
6487 // Stack layout on entry:
6488 //
6489 // [sp]: constant elements.
6490 // [sp + kPointerSize]: literal index.
6491 // [sp + (2 * kPointerSize)]: literals array.
6492
6493 // All sizes here are multiples of kPointerSize.
6494 int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
6495 int size = JSArray::kSize + elements_size;
6496
6497 // Load boilerplate object into r3 and check if we need to create a
6498 // boilerplate.
6499 Label slow_case;
6500 __ ldr(r3, MemOperand(sp, 2 * kPointerSize));
6501 __ ldr(r0, MemOperand(sp, 1 * kPointerSize));
6502 __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
6503 __ ldr(r3, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
6504 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
6505 __ cmp(r3, ip);
6506 __ b(eq, &slow_case);
6507
6508 // Allocate both the JS array and the elements array in one big
6509 // allocation. This avoids multiple limit checks.
Kristian Monsen25f61362010-05-21 11:50:48 +01006510 __ AllocateInNewSpace(size,
Andrei Popescu402d9372010-02-26 13:31:12 +00006511 r0,
6512 r1,
6513 r2,
6514 &slow_case,
6515 TAG_OBJECT);
6516
6517 // Copy the JS array part.
6518 for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
6519 if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
6520 __ ldr(r1, FieldMemOperand(r3, i));
6521 __ str(r1, FieldMemOperand(r0, i));
6522 }
6523 }
6524
6525 if (length_ > 0) {
6526 // Get hold of the elements array of the boilerplate and setup the
6527 // elements pointer in the resulting object.
6528 __ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset));
6529 __ add(r2, r0, Operand(JSArray::kSize));
6530 __ str(r2, FieldMemOperand(r0, JSArray::kElementsOffset));
6531
6532 // Copy the elements array.
6533 for (int i = 0; i < elements_size; i += kPointerSize) {
6534 __ ldr(r1, FieldMemOperand(r3, i));
6535 __ str(r1, FieldMemOperand(r2, i));
6536 }
6537 }
6538
6539 // Return and remove the on-stack parameters.
6540 __ add(sp, sp, Operand(3 * kPointerSize));
6541 __ Ret();
6542
6543 __ bind(&slow_case);
Steve Block6ded16b2010-05-10 14:33:55 +01006544 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00006545}
6546
6547
6548 // Takes a Smi and converts it to an IEEE 64-bit floating point value in two
6549// registers. The format is 1 sign bit, 11 exponent bits (biased 1023) and
6550// 52 fraction bits (20 in the first word, 32 in the second). Zeros is a
6551// scratch register. Destroys the source register. No GC occurs during this
6552// stub so you don't have to set up the frame.
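// For example, the smi 5 (binary 101 = 1.01 * 2^2) is encoded with sign 0,
// biased exponent 1023 + 2 = 1025, and fraction bits 01 followed by zeros:
// the two words are 0x40140000 (exponent word) and 0x00000000 (mantissa
// word), the IEEE 754 bit pattern of 5.0.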
6553class ConvertToDoubleStub : public CodeStub {
6554 public:
6555 ConvertToDoubleStub(Register result_reg_1,
6556 Register result_reg_2,
6557 Register source_reg,
6558 Register scratch_reg)
6559 : result1_(result_reg_1),
6560 result2_(result_reg_2),
6561 source_(source_reg),
6562 zeros_(scratch_reg) { }
6563
6564 private:
6565 Register result1_;
6566 Register result2_;
6567 Register source_;
6568 Register zeros_;
6569
6570 // Minor key encoding in 16 bits.
6571 class ModeBits: public BitField<OverwriteMode, 0, 2> {};
6572 class OpBits: public BitField<Token::Value, 2, 14> {};
6573
6574 Major MajorKey() { return ConvertToDouble; }
6575 int MinorKey() {
6576 // Encode the parameters in a unique 16 bit value.
6577 return result1_.code() +
6578 (result2_.code() << 4) +
6579 (source_.code() << 8) +
6580 (zeros_.code() << 12);
6581 }
6582
6583 void Generate(MacroAssembler* masm);
6584
6585 const char* GetName() { return "ConvertToDoubleStub"; }
6586
6587#ifdef DEBUG
6588 void Print() { PrintF("ConvertToDoubleStub\n"); }
6589#endif
6590};
6591
6592
6593void ConvertToDoubleStub::Generate(MacroAssembler* masm) {
6594#ifndef BIG_ENDIAN_FLOATING_POINT
6595 Register exponent = result1_;
6596 Register mantissa = result2_;
6597#else
6598 Register exponent = result2_;
6599 Register mantissa = result1_;
6600#endif
6601 Label not_special;
6602 // Convert from Smi to integer.
6603 __ mov(source_, Operand(source_, ASR, kSmiTagSize));
6604 // Move sign bit from source to destination. This works because the sign bit
6605 // in the exponent word of the double has the same position and polarity as
6606 // the 2's complement sign bit in a Smi.
6607 ASSERT(HeapNumber::kSignMask == 0x80000000u);
6608 __ and_(exponent, source_, Operand(HeapNumber::kSignMask), SetCC);
6609 // Subtract from 0 if source was negative.
6610 __ rsb(source_, source_, Operand(0), LeaveCC, ne);
Steve Block6ded16b2010-05-10 14:33:55 +01006611
6612 // We have -1, 0 or 1, which we treat specially. Register source_ contains
6613 // the absolute value: it is either equal to 1 (special case of -1 and 1),
6614 // greater than 1 (not a special case) or less than 1 (special case of 0).
Steve Blocka7e24c12009-10-30 11:49:00 +00006615 __ cmp(source_, Operand(1));
6616 __ b(gt, &not_special);
6617
Steve Blocka7e24c12009-10-30 11:49:00 +00006618 // For 1 or -1 we need to or in the 0 exponent (biased to 1023).
6619 static const uint32_t exponent_word_for_1 =
6620 HeapNumber::kExponentBias << HeapNumber::kExponentShift;
Steve Block6ded16b2010-05-10 14:33:55 +01006621 __ orr(exponent, exponent, Operand(exponent_word_for_1), LeaveCC, eq);
Steve Blocka7e24c12009-10-30 11:49:00 +00006622 // 1, 0 and -1 all have 0 for the second word.
6623 __ mov(mantissa, Operand(0));
6624 __ Ret();
6625
6626 __ bind(&not_special);
Steve Block6ded16b2010-05-10 14:33:55 +01006627 // Count leading zeros. Uses mantissa for a scratch register on pre-ARM5.
Steve Blocka7e24c12009-10-30 11:49:00 +00006628 // Gets the wrong answer for 0, but we already checked for that case above.
Steve Block6ded16b2010-05-10 14:33:55 +01006629 __ CountLeadingZeros(source_, mantissa, zeros_);
Steve Blocka7e24c12009-10-30 11:49:00 +00006630 // Compute exponent and or it into the exponent register.
Steve Block6ded16b2010-05-10 14:33:55 +01006631 // We use mantissa as a scratch register here.
Steve Blocka7e24c12009-10-30 11:49:00 +00006632 __ rsb(mantissa, zeros_, Operand(31 + HeapNumber::kExponentBias));
6633 __ orr(exponent,
6634 exponent,
6635 Operand(mantissa, LSL, HeapNumber::kExponentShift));
6636 // Shift up the source chopping the top bit off.
6637 __ add(zeros_, zeros_, Operand(1));
6638 // This wouldn't work for 1.0 or -1.0 as the shift would be 32 which means 0.
6639 __ mov(source_, Operand(source_, LSL, zeros_));
6640 // Compute lower part of fraction (last 12 bits).
6641 __ mov(mantissa, Operand(source_, LSL, HeapNumber::kMantissaBitsInTopWord));
6642 // And the top (top 20 bits).
6643 __ orr(exponent,
6644 exponent,
6645 Operand(source_, LSR, 32 - HeapNumber::kMantissaBitsInTopWord));
6646 __ Ret();
6647}
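
// Illustrative sketch (not part of the stub): the conversion above applied on
// the host to the value 5, to make the resulting bit pattern explicit.
//
//   uint32_t value = 5;                            // |value| after untagging
//   int zeros = 29;                                // leading zeros of 5
//   uint32_t exponent = (31 - zeros) + 1023;       // 1025, since 2^2 <= 5 < 2^3
//   uint32_t top = value << (zeros + 1);           // 0x40000000, implicit 1 gone
//   uint32_t hi = (exponent << 20) | (top >> 12);  // 0x40140000
//   uint32_t lo = top << 20;                       // 0x00000000
//
// hi:lo is 0x40140000:00000000, exactly the IEEE 754 encoding of 5.0.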
6648
6649
Steve Blocka7e24c12009-10-30 11:49:00 +00006650// See comment for class.
Steve Blockd0582a62009-12-15 09:54:21 +00006651void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) {
Steve Blocka7e24c12009-10-30 11:49:00 +00006652 Label max_negative_int;
6653 // the_int_ has the answer which is a signed int32 but not a Smi.
6654 // We test for the special value that has a different exponent. This test
6655 // has the neat side effect of setting the flags according to the sign.
6656 ASSERT(HeapNumber::kSignMask == 0x80000000u);
6657 __ cmp(the_int_, Operand(0x80000000u));
6658 __ b(eq, &max_negative_int);
6659 // Set up the correct exponent in scratch_. All non-Smi int32s have the same.
6660 // A non-Smi integer is 1.xxx * 2^30 so the exponent is 30 (biased).
6661 uint32_t non_smi_exponent =
6662 (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift;
6663 __ mov(scratch_, Operand(non_smi_exponent));
6664 // Set the sign bit in scratch_ if the value was negative.
6665 __ orr(scratch_, scratch_, Operand(HeapNumber::kSignMask), LeaveCC, cs);
6666 // Subtract from 0 if the value was negative.
6667 __ rsb(the_int_, the_int_, Operand(0), LeaveCC, cs);
6668 // We should be masking the implicit first digit of the mantissa away here,
6669 // but it just ends up combining harmlessly with the last digit of the
6670 // exponent that happens to be 1. The sign bit is 0 so we shift 10 to get
6671 // the most significant 1 to hit the last bit of the 12 bit sign and exponent.
6672 ASSERT(((1 << HeapNumber::kExponentShift) & non_smi_exponent) != 0);
6673 const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2;
6674 __ orr(scratch_, scratch_, Operand(the_int_, LSR, shift_distance));
6675 __ str(scratch_, FieldMemOperand(the_heap_number_,
6676 HeapNumber::kExponentOffset));
6677 __ mov(scratch_, Operand(the_int_, LSL, 32 - shift_distance));
6678 __ str(scratch_, FieldMemOperand(the_heap_number_,
6679 HeapNumber::kMantissaOffset));
6680 __ Ret();
6681
6682 __ bind(&max_negative_int);
6683 // The max negative int32 is stored as a positive number in the mantissa of
6684 // a double because it uses a sign bit instead of using two's complement.
6685 // The actual mantissa bits stored are all 0 because the implicit most
6686 // significant 1 bit is not stored.
6687 non_smi_exponent += 1 << HeapNumber::kExponentShift;
6688 __ mov(ip, Operand(HeapNumber::kSignMask | non_smi_exponent));
6689 __ str(ip, FieldMemOperand(the_heap_number_, HeapNumber::kExponentOffset));
6690 __ mov(ip, Operand(0));
6691 __ str(ip, FieldMemOperand(the_heap_number_, HeapNumber::kMantissaOffset));
6692 __ Ret();
6693}
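
// Illustrative sketch (not part of the stub): why the max_negative_int path
// above only has to patch the exponent. -2^31 is a power of two, so once the
// implicit leading 1 is dropped all stored mantissa bits are zero:
//
//   uint32_t exponent = (1023 + 31) << 20;  // one above the non-Smi exponent
//   uint32_t hi = 0x80000000u | exponent;   // 0xC1E00000
//   uint32_t lo = 0;
//
// hi:lo is 0xC1E00000:00000000, the IEEE 754 encoding of -2147483648.0.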
6694
6695
6696// Handle the case where the lhs and rhs are the same object.
6697// Equality is almost reflexive (everything but NaN), so this is a test
6698// for "identity and not NaN".
6699static void EmitIdenticalObjectComparison(MacroAssembler* masm,
6700 Label* slow,
Leon Clarkee46be812010-01-19 14:06:41 +00006701 Condition cc,
6702 bool never_nan_nan) {
Steve Blocka7e24c12009-10-30 11:49:00 +00006703 Label not_identical;
Leon Clarkee46be812010-01-19 14:06:41 +00006704 Label heap_number, return_equal;
6705 Register exp_mask_reg = r5;
Steve Block6ded16b2010-05-10 14:33:55 +01006706 __ cmp(r0, r1);
Steve Blocka7e24c12009-10-30 11:49:00 +00006707 __ b(ne, &not_identical);
6708
Leon Clarkee46be812010-01-19 14:06:41 +00006709 // The two objects are identical. If we know that one of them isn't NaN then
6710 // we now know they test equal.
6711 if (cc != eq || !never_nan_nan) {
6712 __ mov(exp_mask_reg, Operand(HeapNumber::kExponentMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00006713
Leon Clarkee46be812010-01-19 14:06:41 +00006714 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
6715 // so we do the second best thing - test it ourselves.
6716 // The two sides are equal, and since they are not both Smis, neither of
6717 // them is a Smi. If it's not a heap number, then return equal.
6718 if (cc == lt || cc == gt) {
6719 __ CompareObjectType(r0, r4, r4, FIRST_JS_OBJECT_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00006720 __ b(ge, slow);
Leon Clarkee46be812010-01-19 14:06:41 +00006721 } else {
6722 __ CompareObjectType(r0, r4, r4, HEAP_NUMBER_TYPE);
6723 __ b(eq, &heap_number);
6724 // Comparing JS objects with <=, >= is complicated.
6725 if (cc != eq) {
6726 __ cmp(r4, Operand(FIRST_JS_OBJECT_TYPE));
6727 __ b(ge, slow);
6728 // Normally here we fall through to return_equal, but undefined is
6729 // special: (undefined == undefined) == true, but
6730 // (undefined <= undefined) == false! See ECMAScript 11.8.5.
6731 if (cc == le || cc == ge) {
6732 __ cmp(r4, Operand(ODDBALL_TYPE));
6733 __ b(ne, &return_equal);
6734 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01006735 __ cmp(r0, r2);
Leon Clarkee46be812010-01-19 14:06:41 +00006736 __ b(ne, &return_equal);
6737 if (cc == le) {
6738 // undefined <= undefined should fail.
6739 __ mov(r0, Operand(GREATER));
6740 } else {
6741 // undefined >= undefined should fail.
6742 __ mov(r0, Operand(LESS));
6743 }
6744 __ mov(pc, Operand(lr)); // Return.
Steve Blockd0582a62009-12-15 09:54:21 +00006745 }
Steve Blockd0582a62009-12-15 09:54:21 +00006746 }
Steve Blocka7e24c12009-10-30 11:49:00 +00006747 }
6748 }
Leon Clarkee46be812010-01-19 14:06:41 +00006749
Steve Blocka7e24c12009-10-30 11:49:00 +00006750 __ bind(&return_equal);
6751 if (cc == lt) {
6752 __ mov(r0, Operand(GREATER)); // Things aren't less than themselves.
6753 } else if (cc == gt) {
6754 __ mov(r0, Operand(LESS)); // Things aren't greater than themselves.
6755 } else {
Leon Clarkee46be812010-01-19 14:06:41 +00006756 __ mov(r0, Operand(EQUAL)); // Things are <=, >=, ==, === themselves.
Steve Blocka7e24c12009-10-30 11:49:00 +00006757 }
6758 __ mov(pc, Operand(lr)); // Return.
6759
Leon Clarkee46be812010-01-19 14:06:41 +00006760 if (cc != eq || !never_nan_nan) {
6761 // For less and greater we don't have to check for NaN since the result of
6762 // x < x is false regardless. For the others here is some code to check
6763 // for NaN.
6764 if (cc != lt && cc != gt) {
6765 __ bind(&heap_number);
6766 // It is a heap number, so return non-equal if it's NaN and equal if it's
6767 // not NaN.
Steve Blocka7e24c12009-10-30 11:49:00 +00006768
Leon Clarkee46be812010-01-19 14:06:41 +00006769 // The representation of NaN values has all exponent bits (52..62) set,
6770 // and not all mantissa bits (0..51) clear.
6771 // Read top bits of double representation (second word of value).
6772 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
6773 // Test that exponent bits are all set.
6774 __ and_(r3, r2, Operand(exp_mask_reg));
6775 __ cmp(r3, Operand(exp_mask_reg));
6776 __ b(ne, &return_equal);
6777
6778 // Shift out flag and all exponent bits, retaining only mantissa.
6779 __ mov(r2, Operand(r2, LSL, HeapNumber::kNonMantissaBitsInTopWord));
6780 // Or with all low-bits of mantissa.
6781 __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
6782 __ orr(r0, r3, Operand(r2), SetCC);
6783 // For equal we already have the right value in r0: Return zero (equal)
6784 // if all bits in mantissa are zero (it's an Infinity) and non-zero if
6785 // not (it's a NaN). For <= and >= we need to load r0 with the failing
6786 // value if it's a NaN.
6787 if (cc != eq) {
6788 // All-zero means Infinity means equal.
6789 __ mov(pc, Operand(lr), LeaveCC, eq); // Return equal
6790 if (cc == le) {
6791 __ mov(r0, Operand(GREATER)); // NaN <= NaN should fail.
6792 } else {
6793 __ mov(r0, Operand(LESS)); // NaN >= NaN should fail.
6794 }
Steve Blocka7e24c12009-10-30 11:49:00 +00006795 }
Leon Clarkee46be812010-01-19 14:06:41 +00006796 __ mov(pc, Operand(lr)); // Return.
Steve Blocka7e24c12009-10-30 11:49:00 +00006797 }
Leon Clarkee46be812010-01-19 14:06:41 +00006798 // No fall through here.
Steve Blocka7e24c12009-10-30 11:49:00 +00006799 }
Steve Blocka7e24c12009-10-30 11:49:00 +00006800
6801 __ bind(&not_identical);
6802}
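
// Illustrative sketch (not part of the stub): the NaN test performed above,
// expressed on the host. A double is a NaN iff all 11 exponent bits are set
// and at least one of the 52 mantissa bits is set:
//
//   bool IsNaNBits(uint32_t hi, uint32_t lo) {
//     const uint32_t kExpMask = 0x7FF00000u;          // exponent bits 62..52
//     if ((hi & kExpMask) != kExpMask) return false;  // exponent not all ones
//     return ((hi << 12) | lo) != 0;                  // any mantissa bit set?
//   }
//
// The generated code does the same: it masks and compares the exponent, then
// ORs the shifted top word with the low mantissa word and tests for zero.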
6803
6804
6805// See comment at call site.
6806static void EmitSmiNonsmiComparison(MacroAssembler* masm,
Leon Clarkee46be812010-01-19 14:06:41 +00006807 Label* lhs_not_nan,
Steve Blocka7e24c12009-10-30 11:49:00 +00006808 Label* slow,
6809 bool strict) {
Leon Clarked91b9f72010-01-27 17:25:45 +00006810 Label rhs_is_smi;
Steve Blocka7e24c12009-10-30 11:49:00 +00006811 __ tst(r0, Operand(kSmiTagMask));
Leon Clarked91b9f72010-01-27 17:25:45 +00006812 __ b(eq, &rhs_is_smi);
Steve Blocka7e24c12009-10-30 11:49:00 +00006813
Leon Clarked91b9f72010-01-27 17:25:45 +00006814 // Lhs is a Smi. Check whether the rhs is a heap number.
Steve Blocka7e24c12009-10-30 11:49:00 +00006815 __ CompareObjectType(r0, r4, r4, HEAP_NUMBER_TYPE);
6816 if (strict) {
Leon Clarked91b9f72010-01-27 17:25:45 +00006817 // If rhs is not a number and lhs is a Smi then strict equality cannot
Steve Blocka7e24c12009-10-30 11:49:00 +00006818 // succeed. Return non-equal (r0 is already not zero).
6819 __ mov(pc, Operand(lr), LeaveCC, ne); // Return.
6820 } else {
6821 // Smi compared non-strictly with a non-Smi non-heap-number. Call
6822 // the runtime.
6823 __ b(ne, slow);
6824 }
6825
Leon Clarked91b9f72010-01-27 17:25:45 +00006826 // Lhs (r1) is a smi, rhs (r0) is a number.
Steve Blockd0582a62009-12-15 09:54:21 +00006827 if (CpuFeatures::IsSupported(VFP3)) {
Leon Clarked91b9f72010-01-27 17:25:45 +00006828 // Convert lhs to a double in d7.
Steve Blockd0582a62009-12-15 09:54:21 +00006829 CpuFeatures::Scope scope(VFP3);
Leon Clarked91b9f72010-01-27 17:25:45 +00006830 __ mov(r7, Operand(r1, ASR, kSmiTagSize));
6831 __ vmov(s15, r7);
Steve Block6ded16b2010-05-10 14:33:55 +01006832 __ vcvt_f64_s32(d7, s15);
Leon Clarked91b9f72010-01-27 17:25:45 +00006833 // Load the double from rhs, tagged HeapNumber r0, to d6.
6834 __ sub(r7, r0, Operand(kHeapObjectTag));
6835 __ vldr(d6, r7, HeapNumber::kValueOffset);
Steve Blockd0582a62009-12-15 09:54:21 +00006836 } else {
Leon Clarked91b9f72010-01-27 17:25:45 +00006837 __ push(lr);
6838 // Convert lhs to a double in r2, r3.
Steve Blockd0582a62009-12-15 09:54:21 +00006839 __ mov(r7, Operand(r1));
6840 ConvertToDoubleStub stub1(r3, r2, r7, r6);
6841 __ Call(stub1.GetCode(), RelocInfo::CODE_TARGET);
Leon Clarked91b9f72010-01-27 17:25:45 +00006842 // Load rhs to a double in r0, r1.
Leon Clarkef7060e22010-06-03 12:02:55 +01006843 __ Ldrd(r0, r1, FieldMemOperand(r0, HeapNumber::kValueOffset));
Leon Clarked91b9f72010-01-27 17:25:45 +00006844 __ pop(lr);
Steve Blockd0582a62009-12-15 09:54:21 +00006845 }
6846
Steve Blocka7e24c12009-10-30 11:49:00 +00006847 // We now have both loaded as doubles but we can skip the lhs nan check
Leon Clarked91b9f72010-01-27 17:25:45 +00006848 // since it's a smi.
Leon Clarkee46be812010-01-19 14:06:41 +00006849 __ jmp(lhs_not_nan);
Steve Blocka7e24c12009-10-30 11:49:00 +00006850
Leon Clarked91b9f72010-01-27 17:25:45 +00006851 __ bind(&rhs_is_smi);
6852 // Rhs is a smi. Check whether the non-smi lhs is a heap number.
Steve Blocka7e24c12009-10-30 11:49:00 +00006853 __ CompareObjectType(r1, r4, r4, HEAP_NUMBER_TYPE);
6854 if (strict) {
Leon Clarked91b9f72010-01-27 17:25:45 +00006855 // If lhs is not a number and rhs is a smi then strict equality cannot
Steve Blocka7e24c12009-10-30 11:49:00 +00006856 // succeed. Return non-equal.
6857 __ mov(r0, Operand(1), LeaveCC, ne); // Non-zero indicates not equal.
6858 __ mov(pc, Operand(lr), LeaveCC, ne); // Return.
6859 } else {
Leon Clarked91b9f72010-01-27 17:25:45 +00006860 // Smi compared non-strictly with a non-smi non-heap-number. Call
Steve Blocka7e24c12009-10-30 11:49:00 +00006861 // the runtime.
6862 __ b(ne, slow);
6863 }
6864
Leon Clarked91b9f72010-01-27 17:25:45 +00006865 // Rhs (r0) is a smi, lhs (r1) is a heap number.
Steve Blockd0582a62009-12-15 09:54:21 +00006866 if (CpuFeatures::IsSupported(VFP3)) {
Leon Clarked91b9f72010-01-27 17:25:45 +00006867 // Convert rhs to a double in d6.
Steve Blockd0582a62009-12-15 09:54:21 +00006868 CpuFeatures::Scope scope(VFP3);
Leon Clarked91b9f72010-01-27 17:25:45 +00006869 // Load the double from lhs, tagged HeapNumber r1, to d7.
6870 __ sub(r7, r1, Operand(kHeapObjectTag));
6871 __ vldr(d7, r7, HeapNumber::kValueOffset);
6872 __ mov(r7, Operand(r0, ASR, kSmiTagSize));
6873 __ vmov(s13, r7);
Steve Block6ded16b2010-05-10 14:33:55 +01006874 __ vcvt_f64_s32(d6, s13);
Steve Blockd0582a62009-12-15 09:54:21 +00006875 } else {
Leon Clarked91b9f72010-01-27 17:25:45 +00006876 __ push(lr);
6877 // Load lhs to a double in r2, r3.
Leon Clarkef7060e22010-06-03 12:02:55 +01006878 __ Ldrd(r2, r3, FieldMemOperand(r1, HeapNumber::kValueOffset));
Leon Clarked91b9f72010-01-27 17:25:45 +00006879 // Convert rhs to a double in r0, r1.
Steve Blockd0582a62009-12-15 09:54:21 +00006880 __ mov(r7, Operand(r0));
6881 ConvertToDoubleStub stub2(r1, r0, r7, r6);
6882 __ Call(stub2.GetCode(), RelocInfo::CODE_TARGET);
Leon Clarked91b9f72010-01-27 17:25:45 +00006883 __ pop(lr);
Steve Blockd0582a62009-12-15 09:54:21 +00006884 }
Steve Blocka7e24c12009-10-30 11:49:00 +00006885 // Fall through to both_loaded_as_doubles.
6886}
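
// Illustrative sketch (not part of the stub): the Smi-to-double conversion
// used on both paths above, in host terms (tagged_smi stands for the raw
// tagged word). A Smi is untagged with an arithmetic shift right by one and
// the resulting int32 is then converted:
//
//   int32_t untagged = tagged_smi >> 1;             // drop the Smi tag bit
//   double value = static_cast<double>(untagged);   // what vcvt_f64_s32 does
//
// Without VFP3 the same conversion is done by ConvertToDoubleStub, which
// assembles the two 32-bit halves of the double by hand.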
6887
6888
Leon Clarkee46be812010-01-19 14:06:41 +00006889void EmitNanCheck(MacroAssembler* masm, Label* lhs_not_nan, Condition cc) {
Steve Blocka7e24c12009-10-30 11:49:00 +00006890 bool exp_first = (HeapNumber::kExponentOffset == HeapNumber::kValueOffset);
Leon Clarkee46be812010-01-19 14:06:41 +00006891 Register rhs_exponent = exp_first ? r0 : r1;
6892 Register lhs_exponent = exp_first ? r2 : r3;
6893 Register rhs_mantissa = exp_first ? r1 : r0;
6894 Register lhs_mantissa = exp_first ? r3 : r2;
Steve Blocka7e24c12009-10-30 11:49:00 +00006895 Label one_is_nan, neither_is_nan;
Leon Clarkee46be812010-01-19 14:06:41 +00006896 Label lhs_not_nan_exp_mask_is_loaded;
Steve Blocka7e24c12009-10-30 11:49:00 +00006897
6898 Register exp_mask_reg = r5;
6899
6900 __ mov(exp_mask_reg, Operand(HeapNumber::kExponentMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00006901 __ and_(r4, lhs_exponent, Operand(exp_mask_reg));
6902 __ cmp(r4, Operand(exp_mask_reg));
Leon Clarkee46be812010-01-19 14:06:41 +00006903 __ b(ne, &lhs_not_nan_exp_mask_is_loaded);
Steve Blocka7e24c12009-10-30 11:49:00 +00006904 __ mov(r4,
6905 Operand(lhs_exponent, LSL, HeapNumber::kNonMantissaBitsInTopWord),
6906 SetCC);
6907 __ b(ne, &one_is_nan);
6908 __ cmp(lhs_mantissa, Operand(0));
Leon Clarkee46be812010-01-19 14:06:41 +00006909 __ b(ne, &one_is_nan);
6910
6911 __ bind(lhs_not_nan);
6912 __ mov(exp_mask_reg, Operand(HeapNumber::kExponentMask));
6913 __ bind(&lhs_not_nan_exp_mask_is_loaded);
6914 __ and_(r4, rhs_exponent, Operand(exp_mask_reg));
6915 __ cmp(r4, Operand(exp_mask_reg));
6916 __ b(ne, &neither_is_nan);
6917 __ mov(r4,
6918 Operand(rhs_exponent, LSL, HeapNumber::kNonMantissaBitsInTopWord),
6919 SetCC);
6920 __ b(ne, &one_is_nan);
6921 __ cmp(rhs_mantissa, Operand(0));
Steve Blocka7e24c12009-10-30 11:49:00 +00006922 __ b(eq, &neither_is_nan);
6923
6924 __ bind(&one_is_nan);
6925 // NaN comparisons always fail.
6926 // Load whatever we need in r0 to make the comparison fail.
6927 if (cc == lt || cc == le) {
6928 __ mov(r0, Operand(GREATER));
6929 } else {
6930 __ mov(r0, Operand(LESS));
6931 }
6932 __ mov(pc, Operand(lr)); // Return.
6933
6934 __ bind(&neither_is_nan);
6935}
6936
6937
6938// See comment at call site.
6939static void EmitTwoNonNanDoubleComparison(MacroAssembler* masm, Condition cc) {
6940 bool exp_first = (HeapNumber::kExponentOffset == HeapNumber::kValueOffset);
Leon Clarkee46be812010-01-19 14:06:41 +00006941 Register rhs_exponent = exp_first ? r0 : r1;
6942 Register lhs_exponent = exp_first ? r2 : r3;
6943 Register rhs_mantissa = exp_first ? r1 : r0;
6944 Register lhs_mantissa = exp_first ? r3 : r2;
Steve Blocka7e24c12009-10-30 11:49:00 +00006945
6946 // r0, r1, r2, r3 have the two doubles. Neither is a NaN.
6947 if (cc == eq) {
6948 // Doubles are not equal unless they have the same bit pattern.
6949 // Exception: 0 and -0.
Leon Clarkee46be812010-01-19 14:06:41 +00006950 __ cmp(rhs_mantissa, Operand(lhs_mantissa));
6951 __ orr(r0, rhs_mantissa, Operand(lhs_mantissa), LeaveCC, ne);
Steve Blocka7e24c12009-10-30 11:49:00 +00006952 // Return non-zero if the numbers are unequal.
6953 __ mov(pc, Operand(lr), LeaveCC, ne);
6954
Leon Clarkee46be812010-01-19 14:06:41 +00006955 __ sub(r0, rhs_exponent, Operand(lhs_exponent), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00006956 // If exponents are equal then return 0.
6957 __ mov(pc, Operand(lr), LeaveCC, eq);
6958
6959 // Exponents are unequal. The only way we can return that the numbers
6960 // are equal is if one is -0 and the other is 0. We already dealt
6961 // with the case where both are -0 or both are 0.
6962 // We start by seeing if the mantissas (that are equal) or the bottom
6963 // 31 bits of the rhs exponent are non-zero. If so we return not
6964 // equal.
Leon Clarkee46be812010-01-19 14:06:41 +00006965 __ orr(r4, lhs_mantissa, Operand(lhs_exponent, LSL, kSmiTagSize), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00006966 __ mov(r0, Operand(r4), LeaveCC, ne);
6967 __ mov(pc, Operand(lr), LeaveCC, ne); // Return conditionally.
6968 // Now they are equal if and only if the lhs exponent is zero in its
6969 // low 31 bits.
Leon Clarkee46be812010-01-19 14:06:41 +00006970 __ mov(r0, Operand(rhs_exponent, LSL, kSmiTagSize));
Steve Blocka7e24c12009-10-30 11:49:00 +00006971 __ mov(pc, Operand(lr));
6972 } else {
6973 // Call a native function to do a comparison between two non-NaNs.
6974 // Call C routine that may not cause GC or other trouble.
Steve Block6ded16b2010-05-10 14:33:55 +01006975 __ push(lr);
6976 __ PrepareCallCFunction(4, r5); // Two doubles count as 4 arguments.
6977 __ CallCFunction(ExternalReference::compare_doubles(), 4);
6978 __ pop(pc); // Return.
Steve Blocka7e24c12009-10-30 11:49:00 +00006979 }
6980}
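
// Illustrative sketch (not part of the stub): roughly the equality check the
// cc == eq path above performs, in plain C++. Two non-NaN doubles are equal
// iff their bit patterns match, except that +0.0 and -0.0 must compare equal:
//
//   bool DoubleBitsEqual(uint32_t rhs_hi, uint32_t rhs_lo,
//                        uint32_t lhs_hi, uint32_t lhs_lo) {
//     if (rhs_hi == lhs_hi && rhs_lo == lhs_lo) return true;  // same bits
//     // Only remaining equal case: one side is +0.0 and the other -0.0,
//     // i.e. both mantissas are zero and the top words differ only in the
//     // sign bit.
//     return rhs_lo == 0 && lhs_lo == 0 && ((rhs_hi | lhs_hi) << 1) == 0;
//   }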
6981
6982
6983// See comment at call site.
6984static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm) {
6985 // If either operand is a JSObject or an oddball value, then they are
6986 // not equal since their pointers are different.
6987 // There is no test for undetectability in strict equality.
6988 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
6989 Label first_non_object;
6990 // Get the type of the first operand into r2 and compare it with
6991 // FIRST_JS_OBJECT_TYPE.
6992 __ CompareObjectType(r0, r2, r2, FIRST_JS_OBJECT_TYPE);
6993 __ b(lt, &first_non_object);
6994
6995 // Return non-zero (r0 is not zero)
6996 Label return_not_equal;
6997 __ bind(&return_not_equal);
6998 __ mov(pc, Operand(lr)); // Return.
6999
7000 __ bind(&first_non_object);
7001 // Check for oddballs: true, false, null, undefined.
7002 __ cmp(r2, Operand(ODDBALL_TYPE));
7003 __ b(eq, &return_not_equal);
7004
7005 __ CompareObjectType(r1, r3, r3, FIRST_JS_OBJECT_TYPE);
7006 __ b(ge, &return_not_equal);
7007
7008 // Check for oddballs: true, false, null, undefined.
7009 __ cmp(r3, Operand(ODDBALL_TYPE));
7010 __ b(eq, &return_not_equal);
Leon Clarkee46be812010-01-19 14:06:41 +00007011
7012 // Now that we have the types we might as well check for symbol-symbol.
7013 // Ensure that no non-strings have the symbol bit set.
7014 ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
7015 ASSERT(kSymbolTag != 0);
7016 __ and_(r2, r2, Operand(r3));
7017 __ tst(r2, Operand(kIsSymbolMask));
7018 __ b(ne, &return_not_equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00007019}
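
// Illustrative sketch (not part of the stub): the symbol-symbol shortcut at
// the end of the check above. Only string types can have the symbol bit set,
// so ANDing the two instance types keeps that bit only if both operands are
// symbols:
//
//   bool BothSymbols(int type_of_r0, int type_of_r1) {
//     return ((type_of_r0 & type_of_r1) & kIsSymbolMask) != 0;
//   }
//
// Two distinct symbols are never equal, so the stub can report not-equal
// without comparing any characters.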
7020
7021
7022// See comment at call site.
7023static void EmitCheckForTwoHeapNumbers(MacroAssembler* masm,
7024 Label* both_loaded_as_doubles,
7025 Label* not_heap_numbers,
7026 Label* slow) {
Leon Clarkee46be812010-01-19 14:06:41 +00007027 __ CompareObjectType(r0, r3, r2, HEAP_NUMBER_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00007028 __ b(ne, not_heap_numbers);
Leon Clarkee46be812010-01-19 14:06:41 +00007029 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
7030 __ cmp(r2, r3);
Steve Blocka7e24c12009-10-30 11:49:00 +00007031 __ b(ne, slow); // First was a heap number, second wasn't. Go slow case.
7032
7033 // Both are heap numbers. Load them up then jump to the code we have
7034 // for that.
Leon Clarked91b9f72010-01-27 17:25:45 +00007035 if (CpuFeatures::IsSupported(VFP3)) {
7036 CpuFeatures::Scope scope(VFP3);
7037 __ sub(r7, r0, Operand(kHeapObjectTag));
7038 __ vldr(d6, r7, HeapNumber::kValueOffset);
7039 __ sub(r7, r1, Operand(kHeapObjectTag));
7040 __ vldr(d7, r7, HeapNumber::kValueOffset);
7041 } else {
Leon Clarkef7060e22010-06-03 12:02:55 +01007042 __ Ldrd(r2, r3, FieldMemOperand(r1, HeapNumber::kValueOffset));
7043 __ Ldrd(r0, r1, FieldMemOperand(r0, HeapNumber::kValueOffset));
Leon Clarked91b9f72010-01-27 17:25:45 +00007044 }
Steve Blocka7e24c12009-10-30 11:49:00 +00007045 __ jmp(both_loaded_as_doubles);
7046}
7047
7048
7049// Fast negative check for symbol-to-symbol equality.
7050static void EmitCheckForSymbols(MacroAssembler* masm, Label* slow) {
7051 // r2 is object type of r0.
Leon Clarkee46be812010-01-19 14:06:41 +00007052 // Ensure that no non-strings have the symbol bit set.
7053 ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
7054 ASSERT(kSymbolTag != 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00007055 __ tst(r2, Operand(kIsSymbolMask));
7056 __ b(eq, slow);
Leon Clarkee46be812010-01-19 14:06:41 +00007057 __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
7058 __ ldrb(r3, FieldMemOperand(r3, Map::kInstanceTypeOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00007059 __ tst(r3, Operand(kIsSymbolMask));
7060 __ b(eq, slow);
7061
7062 // Both are symbols. We already checked they weren't the same pointer
7063 // so they are not equal.
7064 __ mov(r0, Operand(1)); // Non-zero indicates not equal.
7065 __ mov(pc, Operand(lr)); // Return.
7066}
7067
7068
Steve Block6ded16b2010-05-10 14:33:55 +01007069void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
7070 Register object,
7071 Register result,
7072 Register scratch1,
7073 Register scratch2,
7074 Register scratch3,
7075 bool object_is_smi,
7076 Label* not_found) {
7077 // Use of registers. Register result is used as a temporary.
7078 Register number_string_cache = result;
7079 Register mask = scratch3;
7080
7081 // Load the number string cache.
7082 __ LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);
7083
7084 // Make the hash mask from the length of the number string cache. It
7085 // contains two elements (number and string) for each cache entry.
7086 __ ldr(mask, FieldMemOperand(number_string_cache, FixedArray::kLengthOffset));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01007087 // Divide length by two (length is a smi).
7088 __ mov(mask, Operand(mask, ASR, kSmiTagSize + 1));
Steve Block6ded16b2010-05-10 14:33:55 +01007089 __ sub(mask, mask, Operand(1)); // Make mask.
7090
7091 // Calculate the entry in the number string cache. The hash value in the
7092 // number string cache for smis is just the smi value, and the hash for
7093 // doubles is the xor of the upper and lower words. See
7094 // Heap::GetNumberStringCache.
7095 Label is_smi;
7096 Label load_result_from_cache;
7097 if (!object_is_smi) {
7098 __ BranchOnSmi(object, &is_smi);
7099 if (CpuFeatures::IsSupported(VFP3)) {
7100 CpuFeatures::Scope scope(VFP3);
7101 __ CheckMap(object,
7102 scratch1,
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01007103 Heap::kHeapNumberMapRootIndex,
Steve Block6ded16b2010-05-10 14:33:55 +01007104 not_found,
7105 true);
7106
7107 ASSERT_EQ(8, kDoubleSize);
7108 __ add(scratch1,
7109 object,
7110 Operand(HeapNumber::kValueOffset - kHeapObjectTag));
7111 __ ldm(ia, scratch1, scratch1.bit() | scratch2.bit());
7112 __ eor(scratch1, scratch1, Operand(scratch2));
7113 __ and_(scratch1, scratch1, Operand(mask));
7114
7115 // Calculate address of entry in string cache: each entry consists
7116 // of two pointer sized fields.
7117 __ add(scratch1,
7118 number_string_cache,
7119 Operand(scratch1, LSL, kPointerSizeLog2 + 1));
7120
7121 Register probe = mask;
7122 __ ldr(probe,
7123 FieldMemOperand(scratch1, FixedArray::kHeaderSize));
7124 __ BranchOnSmi(probe, not_found);
7125 __ sub(scratch2, object, Operand(kHeapObjectTag));
7126 __ vldr(d0, scratch2, HeapNumber::kValueOffset);
7127 __ sub(probe, probe, Operand(kHeapObjectTag));
7128 __ vldr(d1, probe, HeapNumber::kValueOffset);
7129 __ vcmp(d0, d1);
7130 __ vmrs(pc);
7131 __ b(ne, not_found); // The cache did not contain this value.
7132 __ b(&load_result_from_cache);
7133 } else {
7134 __ b(not_found);
7135 }
7136 }
7137
7138 __ bind(&is_smi);
7139 Register scratch = scratch1;
7140 __ and_(scratch, mask, Operand(object, ASR, 1));
7141 // Calculate address of entry in string cache: each entry consists
7142 // of two pointer sized fields.
7143 __ add(scratch,
7144 number_string_cache,
7145 Operand(scratch, LSL, kPointerSizeLog2 + 1));
7146
7147 // Check if the entry is the smi we are looking for.
7148 Register probe = mask;
7149 __ ldr(probe, FieldMemOperand(scratch, FixedArray::kHeaderSize));
7150 __ cmp(object, probe);
7151 __ b(ne, not_found);
7152
7153 // Get the result from the cache.
7154 __ bind(&load_result_from_cache);
7155 __ ldr(result,
7156 FieldMemOperand(scratch, FixedArray::kHeaderSize + kPointerSize));
7157 __ IncrementCounter(&Counters::number_to_string_native,
7158 1,
7159 scratch1,
7160 scratch2);
7161}
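
// Illustrative sketch (not part of the stub): how the cache index computed
// above is derived, mirroring Heap::GetNumberStringCache. Each cache entry
// is a (number, string) pair, so the mask is half the cache length minus one.
// cache_length, untagged_smi, hi_word and lo_word are placeholders for the
// values the stub keeps in its scratch registers:
//
//   int mask = (cache_length >> 1) - 1;            // cache_length is even
//   int hash_smi = untagged_smi & mask;            // Smi case
//   int hash_dbl = (hi_word ^ lo_word) & mask;     // heap number case
//
// The probed entry then lives at FixedArray::kHeaderSize plus
// hash * 2 * kPointerSize, which is what the "LSL, kPointerSizeLog2 + 1"
// address computation above builds.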
7162
7163
7164void NumberToStringStub::Generate(MacroAssembler* masm) {
7165 Label runtime;
7166
7167 __ ldr(r1, MemOperand(sp, 0));
7168
7169 // Generate code to lookup number in the number string cache.
7170 GenerateLookupNumberStringCache(masm, r1, r0, r2, r3, r4, false, &runtime);
7171 __ add(sp, sp, Operand(1 * kPointerSize));
7172 __ Ret();
7173
7174 __ bind(&runtime);
7175 // Handle number to string in the runtime system if not found in the cache.
7176 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);
7177}
7178
7179
7180void RecordWriteStub::Generate(MacroAssembler* masm) {
7181 __ RecordWriteHelper(object_, offset_, scratch_);
7182 __ Ret();
7183}
7184
7185
Leon Clarked91b9f72010-01-27 17:25:45 +00007186// On entry r0 (rhs) and r1 (lhs) are the values to be compared.
7187// On exit r0 is 0, positive or negative to indicate the result of
7188// the comparison.
Steve Blocka7e24c12009-10-30 11:49:00 +00007189void CompareStub::Generate(MacroAssembler* masm) {
7190 Label slow; // Call builtin.
Leon Clarkee46be812010-01-19 14:06:41 +00007191 Label not_smis, both_loaded_as_doubles, lhs_not_nan;
Steve Blocka7e24c12009-10-30 11:49:00 +00007192
7193 // NOTICE! This code is only reached after a smi-fast-case check, so
7194 // it is certain that at least one operand isn't a smi.
7195
7196 // Handle the case where the objects are identical. Either returns the answer
7197 // or goes to slow. Only falls through if the objects were not identical.
Leon Clarkee46be812010-01-19 14:06:41 +00007198 EmitIdenticalObjectComparison(masm, &slow, cc_, never_nan_nan_);
Steve Blocka7e24c12009-10-30 11:49:00 +00007199
7200 // If either is a Smi (we know that not both are), then they can only
7201 // be strictly equal if the other is a HeapNumber.
7202 ASSERT_EQ(0, kSmiTag);
7203 ASSERT_EQ(0, Smi::FromInt(0));
7204 __ and_(r2, r0, Operand(r1));
7205 __ tst(r2, Operand(kSmiTagMask));
7206 __ b(ne, &not_smis);
7207 // One operand is a smi. EmitSmiNonsmiComparison generates code that can:
7208 // 1) Return the answer.
7209 // 2) Go to slow.
7210 // 3) Fall through to both_loaded_as_doubles.
Leon Clarkee46be812010-01-19 14:06:41 +00007211 // 4) Jump to lhs_not_nan.
Steve Blocka7e24c12009-10-30 11:49:00 +00007212 // In cases 3 and 4 we have found out we were dealing with a number-number
Leon Clarked91b9f72010-01-27 17:25:45 +00007213 // comparison. If VFP3 is supported the double values of the numbers have
7214 // been loaded into d7 and d6. Otherwise, the double values have been loaded
7215 // into r0, r1, r2, and r3.
Leon Clarkee46be812010-01-19 14:06:41 +00007216 EmitSmiNonsmiComparison(masm, &lhs_not_nan, &slow, strict_);
Steve Blocka7e24c12009-10-30 11:49:00 +00007217
7218 __ bind(&both_loaded_as_doubles);
Leon Clarked91b9f72010-01-27 17:25:45 +00007219 // The arguments have been converted to doubles and stored in d6 and d7, if
7220 // VFP3 is supported, or in r0, r1, r2, and r3.
Steve Blockd0582a62009-12-15 09:54:21 +00007221 if (CpuFeatures::IsSupported(VFP3)) {
Leon Clarkee46be812010-01-19 14:06:41 +00007222 __ bind(&lhs_not_nan);
Steve Blockd0582a62009-12-15 09:54:21 +00007223 CpuFeatures::Scope scope(VFP3);
Leon Clarkee46be812010-01-19 14:06:41 +00007224 Label no_nan;
Steve Blockd0582a62009-12-15 09:54:21 +00007225 // ARMv7 VFP3 instructions to implement double precision comparison.
Leon Clarkee46be812010-01-19 14:06:41 +00007226 __ vcmp(d7, d6);
7227 __ vmrs(pc); // Move vector status bits to normal status bits.
7228 Label nan;
7229 __ b(vs, &nan);
7230 __ mov(r0, Operand(EQUAL), LeaveCC, eq);
7231 __ mov(r0, Operand(LESS), LeaveCC, lt);
7232 __ mov(r0, Operand(GREATER), LeaveCC, gt);
7233 __ mov(pc, Operand(lr));
7234
7235 __ bind(&nan);
7236 // If one of the sides was a NaN then the v flag is set. Load r0 with
7237 // whatever it takes to make the comparison fail, since comparisons with NaN
7238 // always fail.
7239 if (cc_ == lt || cc_ == le) {
7240 __ mov(r0, Operand(GREATER));
7241 } else {
7242 __ mov(r0, Operand(LESS));
7243 }
Steve Blockd0582a62009-12-15 09:54:21 +00007244 __ mov(pc, Operand(lr));
7245 } else {
Leon Clarkee46be812010-01-19 14:06:41 +00007246 // Checks for NaN in the doubles we have loaded. Can return the answer or
7247 // fall through if neither is a NaN. Also binds lhs_not_nan.
7248 EmitNanCheck(masm, &lhs_not_nan, cc_);
Steve Blockd0582a62009-12-15 09:54:21 +00007249 // Compares two doubles in r0, r1, r2, r3 that are not NaNs. Returns the
7250 // answer. Never falls through.
7251 EmitTwoNonNanDoubleComparison(masm, cc_);
7252 }
Steve Blocka7e24c12009-10-30 11:49:00 +00007253
7254 __ bind(&not_smis);
7255 // At this point we know we are dealing with two different objects,
7256 // and neither of them is a Smi. The objects are in r0 and r1.
7257 if (strict_) {
7258 // This returns non-equal for some object types, or falls through if it
7259 // was not lucky.
7260 EmitStrictTwoHeapObjectCompare(masm);
7261 }
7262
7263 Label check_for_symbols;
Leon Clarked91b9f72010-01-27 17:25:45 +00007264 Label flat_string_check;
Steve Blocka7e24c12009-10-30 11:49:00 +00007265 // Check for heap-number-heap-number comparison. Can jump to slow case,
7266 // or load both doubles into r0, r1, r2, r3 and jump to the code that handles
7267 // that case. If the inputs are not doubles then jumps to check_for_symbols.
Leon Clarkee46be812010-01-19 14:06:41 +00007268 // In this case r2 will contain the type of r0. Never falls through.
Steve Blocka7e24c12009-10-30 11:49:00 +00007269 EmitCheckForTwoHeapNumbers(masm,
7270 &both_loaded_as_doubles,
7271 &check_for_symbols,
Leon Clarked91b9f72010-01-27 17:25:45 +00007272 &flat_string_check);
Steve Blocka7e24c12009-10-30 11:49:00 +00007273
7274 __ bind(&check_for_symbols);
Leon Clarkee46be812010-01-19 14:06:41 +00007275 // In the strict case the EmitStrictTwoHeapObjectCompare already took care of
7276 // symbols.
7277 if (cc_ == eq && !strict_) {
Steve Blocka7e24c12009-10-30 11:49:00 +00007278 // Either jumps to slow or returns the answer. Assumes that r2 is the type
7279 // of r0 on entry.
Leon Clarked91b9f72010-01-27 17:25:45 +00007280 EmitCheckForSymbols(masm, &flat_string_check);
Steve Blocka7e24c12009-10-30 11:49:00 +00007281 }
7282
Leon Clarked91b9f72010-01-27 17:25:45 +00007283 // Check for both being sequential ASCII strings, and inline if that is the
7284 // case.
7285 __ bind(&flat_string_check);
7286
7287 __ JumpIfNonSmisNotBothSequentialAsciiStrings(r0, r1, r2, r3, &slow);
7288
7289 __ IncrementCounter(&Counters::string_compare_native, 1, r2, r3);
7290 StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
7291 r1,
7292 r0,
7293 r2,
7294 r3,
7295 r4,
7296 r5);
7297 // Never falls through to here.
7298
Steve Blocka7e24c12009-10-30 11:49:00 +00007299 __ bind(&slow);
Leon Clarked91b9f72010-01-27 17:25:45 +00007300
Steve Block6ded16b2010-05-10 14:33:55 +01007301 __ Push(r1, r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00007302 // Figure out which native to call and setup the arguments.
7303 Builtins::JavaScript native;
Steve Blocka7e24c12009-10-30 11:49:00 +00007304 if (cc_ == eq) {
7305 native = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
7306 } else {
7307 native = Builtins::COMPARE;
7308 int ncr; // NaN compare result
7309 if (cc_ == lt || cc_ == le) {
7310 ncr = GREATER;
7311 } else {
7312 ASSERT(cc_ == gt || cc_ == ge); // remaining cases
7313 ncr = LESS;
7314 }
Steve Blocka7e24c12009-10-30 11:49:00 +00007315 __ mov(r0, Operand(Smi::FromInt(ncr)));
7316 __ push(r0);
7317 }
7318
7319 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
7320 // tagged as a small integer.
Leon Clarkee46be812010-01-19 14:06:41 +00007321 __ InvokeBuiltin(native, JUMP_JS);
Steve Blocka7e24c12009-10-30 11:49:00 +00007322}
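
// Illustrative sketch (not part of the stub): how callers consume the value
// this stub leaves in r0 (Compare stands for a call through this stub). The
// sign of the result encodes the answer, so a JavaScript "a < b" boils down
// to roughly:
//
//   int result = Compare(a, b);    // < 0: less, 0: equal, > 0: greater
//   bool less = result < 0;        // a NaN operand makes the stub return
//                                  // GREATER (for lt/le) or LESS (for gt/ge)
//
// which is why the NaN paths above pick whichever constant makes the final
// signed test fail, so <, <=, > and >= all yield false on NaN.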
7323
7324
Steve Blocka7e24c12009-10-30 11:49:00 +00007325// We fall into this code if the operands were Smis, but the result was
7326// not (e.g. overflow). We branch into this code (to the not_smi label) if
7327// the operands were not both Smi. The operands are in r0 and r1. In order
7328// to call the C-implemented binary fp operation routines we need to end up
7329// with the double precision floating point operands in r0 and r1 (for the
7330// value in r1) and r2 and r3 (for the value in r0).
Steve Block6ded16b2010-05-10 14:33:55 +01007331void GenericBinaryOpStub::HandleBinaryOpSlowCases(
7332 MacroAssembler* masm,
7333 Label* not_smi,
7334 Register lhs,
7335 Register rhs,
7336 const Builtins::JavaScript& builtin) {
7337 Label slow, slow_reverse, do_the_call;
7338 bool use_fp_registers = CpuFeatures::IsSupported(VFP3) && Token::MOD != op_;
Steve Blockd0582a62009-12-15 09:54:21 +00007339
Steve Block6ded16b2010-05-10 14:33:55 +01007340 ASSERT((lhs.is(r0) && rhs.is(r1)) || (lhs.is(r1) && rhs.is(r0)));
7341
7342 if (ShouldGenerateSmiCode()) {
7343 // Smi-smi case (overflow).
7344 // Since both are Smis there is no heap number to overwrite, so allocate.
7345 // The new heap number is in r5. r6 and r7 are scratch.
7346 __ AllocateHeapNumber(r5, r6, r7, lhs.is(r0) ? &slow_reverse : &slow);
7347
7348 // If we have floating point hardware, inline ADD, SUB, MUL, and DIV,
7349 // using registers d7 and d6 for the double values.
7350 if (use_fp_registers) {
7351 CpuFeatures::Scope scope(VFP3);
7352 __ mov(r7, Operand(rhs, ASR, kSmiTagSize));
7353 __ vmov(s15, r7);
7354 __ vcvt_f64_s32(d7, s15);
7355 __ mov(r7, Operand(lhs, ASR, kSmiTagSize));
7356 __ vmov(s13, r7);
7357 __ vcvt_f64_s32(d6, s13);
7358 } else {
7359 // Write Smi from rhs to r3 and r2 in double format. r6 is scratch.
7360 __ mov(r7, Operand(rhs));
7361 ConvertToDoubleStub stub1(r3, r2, r7, r6);
7362 __ push(lr);
7363 __ Call(stub1.GetCode(), RelocInfo::CODE_TARGET);
7364 // Write Smi from lhs to r1 and r0 in double format. r6 is scratch.
7365 __ mov(r7, Operand(lhs));
7366 ConvertToDoubleStub stub2(r1, r0, r7, r6);
7367 __ Call(stub2.GetCode(), RelocInfo::CODE_TARGET);
7368 __ pop(lr);
7369 }
7370 __ jmp(&do_the_call); // Tail call. No return.
Steve Blockd0582a62009-12-15 09:54:21 +00007371 }
7372
Steve Block6ded16b2010-05-10 14:33:55 +01007373 // We branch here if at least one of r0 and r1 is not a Smi.
7374 __ bind(not_smi);
7375
7376 // After this point we have the left hand side in r1 and the right hand side
7377 // in r0.
7378 if (lhs.is(r0)) {
7379 __ Swap(r0, r1, ip);
7380 }
7381
7382 if (ShouldGenerateFPCode()) {
7383 Label r0_is_smi, r1_is_smi, finished_loading_r0, finished_loading_r1;
7384
7385 if (runtime_operands_type_ == BinaryOpIC::DEFAULT) {
7386 switch (op_) {
7387 case Token::ADD:
7388 case Token::SUB:
7389 case Token::MUL:
7390 case Token::DIV:
7391 GenerateTypeTransition(masm);
7392 break;
7393
7394 default:
7395 break;
7396 }
7397 }
7398
7399 if (mode_ == NO_OVERWRITE) {
7400 // In the case where there is no chance of an overwritable float we may as
7401 // well do the allocation immediately while r0 and r1 are untouched.
7402 __ AllocateHeapNumber(r5, r6, r7, &slow);
7403 }
7404
7405 // Move r0 to a double in r2-r3.
7406 __ tst(r0, Operand(kSmiTagMask));
7407 __ b(eq, &r0_is_smi); // It's a Smi so don't check it's a heap number.
7408 __ CompareObjectType(r0, r4, r4, HEAP_NUMBER_TYPE);
7409 __ b(ne, &slow);
7410 if (mode_ == OVERWRITE_RIGHT) {
7411 __ mov(r5, Operand(r0)); // Overwrite this heap number.
7412 }
7413 if (use_fp_registers) {
7414 CpuFeatures::Scope scope(VFP3);
7415 // Load the double from tagged HeapNumber r0 to d7.
7416 __ sub(r7, r0, Operand(kHeapObjectTag));
7417 __ vldr(d7, r7, HeapNumber::kValueOffset);
7418 } else {
7419 // Calling convention says that second double is in r2 and r3.
Leon Clarkef7060e22010-06-03 12:02:55 +01007420 __ Ldrd(r2, r3, FieldMemOperand(r0, HeapNumber::kValueOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01007421 }
7422 __ jmp(&finished_loading_r0);
7423 __ bind(&r0_is_smi);
7424 if (mode_ == OVERWRITE_RIGHT) {
7425 // We can't overwrite a Smi so get address of new heap number into r5.
7426 __ AllocateHeapNumber(r5, r6, r7, &slow);
7427 }
7428
7429 if (use_fp_registers) {
7430 CpuFeatures::Scope scope(VFP3);
7431 // Convert smi in r0 to double in d7.
7432 __ mov(r7, Operand(r0, ASR, kSmiTagSize));
7433 __ vmov(s15, r7);
7434 __ vcvt_f64_s32(d7, s15);
7435 } else {
7436 // Write Smi from r0 to r3 and r2 in double format.
7437 __ mov(r7, Operand(r0));
7438 ConvertToDoubleStub stub3(r3, r2, r7, r6);
7439 __ push(lr);
7440 __ Call(stub3.GetCode(), RelocInfo::CODE_TARGET);
7441 __ pop(lr);
7442 }
7443
7444 // HEAP_NUMBERS stub is slower than GENERIC on a pair of smis.
7445 // r0 is known to be a smi. If r1 is also a smi then switch to GENERIC.
7446 Label r1_is_not_smi;
7447 if (runtime_operands_type_ == BinaryOpIC::HEAP_NUMBERS) {
7448 __ tst(r1, Operand(kSmiTagMask));
7449 __ b(ne, &r1_is_not_smi);
7450 GenerateTypeTransition(masm);
7451 __ jmp(&r1_is_smi);
7452 }
7453
7454 __ bind(&finished_loading_r0);
7455
7456 // Move r1 to a double in r0-r1.
7457 __ tst(r1, Operand(kSmiTagMask));
7458 __ b(eq, &r1_is_smi); // It's a Smi so don't check it's a heap number.
7459 __ bind(&r1_is_not_smi);
7460 __ CompareObjectType(r1, r4, r4, HEAP_NUMBER_TYPE);
7461 __ b(ne, &slow);
7462 if (mode_ == OVERWRITE_LEFT) {
7463 __ mov(r5, Operand(r1)); // Overwrite this heap number.
7464 }
7465 if (use_fp_registers) {
7466 CpuFeatures::Scope scope(VFP3);
7467 // Load the double from tagged HeapNumber r1 to d6.
7468 __ sub(r7, r1, Operand(kHeapObjectTag));
7469 __ vldr(d6, r7, HeapNumber::kValueOffset);
7470 } else {
7471 // Calling convention says that first double is in r0 and r1.
Leon Clarkef7060e22010-06-03 12:02:55 +01007472 __ Ldrd(r0, r1, FieldMemOperand(r1, HeapNumber::kValueOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01007473 }
7474 __ jmp(&finished_loading_r1);
7475 __ bind(&r1_is_smi);
7476 if (mode_ == OVERWRITE_LEFT) {
7477 // We can't overwrite a Smi so get address of new heap number into r5.
7478 __ AllocateHeapNumber(r5, r6, r7, &slow);
7479 }
7480
7481 if (use_fp_registers) {
7482 CpuFeatures::Scope scope(VFP3);
7483 // Convert smi in r1 to double in d6.
7484 __ mov(r7, Operand(r1, ASR, kSmiTagSize));
7485 __ vmov(s13, r7);
7486 __ vcvt_f64_s32(d6, s13);
7487 } else {
7488 // Write Smi from r1 to r1 and r0 in double format.
7489 __ mov(r7, Operand(r1));
7490 ConvertToDoubleStub stub4(r1, r0, r7, r6);
7491 __ push(lr);
7492 __ Call(stub4.GetCode(), RelocInfo::CODE_TARGET);
7493 __ pop(lr);
7494 }
7495
7496 __ bind(&finished_loading_r1);
7497
7498 __ bind(&do_the_call);
7499 // If we are inlining the operation using VFP3 instructions for
7500 // add, subtract, multiply, or divide, the arguments are in d6 and d7.
7501 if (use_fp_registers) {
7502 CpuFeatures::Scope scope(VFP3);
7503 // ARMv7 VFP3 instructions to implement
7504 // double precision, add, subtract, multiply, divide.
7505
7506 if (Token::MUL == op_) {
7507 __ vmul(d5, d6, d7);
7508 } else if (Token::DIV == op_) {
7509 __ vdiv(d5, d6, d7);
7510 } else if (Token::ADD == op_) {
7511 __ vadd(d5, d6, d7);
7512 } else if (Token::SUB == op_) {
7513 __ vsub(d5, d6, d7);
7514 } else {
7515 UNREACHABLE();
7516 }
7517 __ sub(r0, r5, Operand(kHeapObjectTag));
7518 __ vstr(d5, r0, HeapNumber::kValueOffset);
7519 __ add(r0, r0, Operand(kHeapObjectTag));
7520 __ mov(pc, lr);
7521 } else {
7522 // If we did not inline the operation, then the arguments are in:
7523 // r0: Left value (least significant part of mantissa).
7524 // r1: Left value (sign, exponent, top of mantissa).
7525 // r2: Right value (least significant part of mantissa).
7526 // r3: Right value (sign, exponent, top of mantissa).
7527 // r5: Address of heap number for result.
7528
7529 __ push(lr); // For later.
7530 __ PrepareCallCFunction(4, r4); // Two doubles count as 4 arguments.
7531 // Call C routine that may not cause GC or other trouble. r5 is callee
7532 // save.
7533 __ CallCFunction(ExternalReference::double_fp_operation(op_), 4);
7534 // Store answer in the overwritable heap number.
7535 #if !defined(USE_ARM_EABI)
7536 // Double returned in fp coprocessor register 0 and 1, encoded as register
7537 // cr8. Offsets must be divisible by 4 for coprocessor so we need to
7538 // subtract the tag from r5.
7539 __ sub(r4, r5, Operand(kHeapObjectTag));
7540 __ stc(p1, cr8, MemOperand(r4, HeapNumber::kValueOffset));
7541 #else
7542 // Double returned in registers 0 and 1.
Leon Clarkef7060e22010-06-03 12:02:55 +01007543 __ Strd(r0, r1, FieldMemOperand(r5, HeapNumber::kValueOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01007544 #endif
7545 __ mov(r0, Operand(r5));
7546 // And we are done.
7547 __ pop(pc);
7548 }
7549 }
7550
7551
7552 if (lhs.is(r0)) {
7553 __ b(&slow);
7554 __ bind(&slow_reverse);
7555 __ Swap(r0, r1, ip);
7556 }
Steve Blocka7e24c12009-10-30 11:49:00 +00007557
7558 // We jump to here if something goes wrong (one param is not a number of any
7559 // sort or new-space allocation fails).
7560 __ bind(&slow);
Steve Blockd0582a62009-12-15 09:54:21 +00007561
7562 // Push arguments to the stack
Steve Block6ded16b2010-05-10 14:33:55 +01007563 __ Push(r1, r0);
Steve Blockd0582a62009-12-15 09:54:21 +00007564
Steve Block6ded16b2010-05-10 14:33:55 +01007565 if (Token::ADD == op_) {
Steve Blockd0582a62009-12-15 09:54:21 +00007566 // Test for string arguments before calling runtime.
7567 // r1 : first argument
7568 // r0 : second argument
7569 // sp[0] : second argument
Andrei Popescu31002712010-02-23 13:46:05 +00007570 // sp[4] : first argument
Steve Blockd0582a62009-12-15 09:54:21 +00007571
Steve Block6ded16b2010-05-10 14:33:55 +01007572 Label not_strings, not_string1, string1, string1_smi2;
Steve Blockd0582a62009-12-15 09:54:21 +00007573 __ tst(r1, Operand(kSmiTagMask));
7574 __ b(eq, &not_string1);
7575 __ CompareObjectType(r1, r2, r2, FIRST_NONSTRING_TYPE);
7576 __ b(ge, &not_string1);
7577
7578 // First argument is a string, test second.
7579 __ tst(r0, Operand(kSmiTagMask));
Steve Block6ded16b2010-05-10 14:33:55 +01007580 __ b(eq, &string1_smi2);
Steve Blockd0582a62009-12-15 09:54:21 +00007581 __ CompareObjectType(r0, r2, r2, FIRST_NONSTRING_TYPE);
7582 __ b(ge, &string1);
7583
7584 // First and second argument are strings.
Steve Block6ded16b2010-05-10 14:33:55 +01007585 StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
7586 __ TailCallStub(&string_add_stub);
7587
7588 __ bind(&string1_smi2);
7589 // First argument is a string, second is a smi. Try to lookup the number
7590 // string for the smi in the number string cache.
7591 NumberToStringStub::GenerateLookupNumberStringCache(
7592 masm, r0, r2, r4, r5, r6, true, &string1);
7593
7594 // Replace second argument on stack and tailcall string add stub to make
7595 // the result.
7596 __ str(r2, MemOperand(sp, 0));
7597 __ TailCallStub(&string_add_stub);
Steve Blockd0582a62009-12-15 09:54:21 +00007598
7599 // Only first argument is a string.
7600 __ bind(&string1);
7601 __ InvokeBuiltin(Builtins::STRING_ADD_LEFT, JUMP_JS);
7602
7603 // First argument was not a string, test second.
7604 __ bind(&not_string1);
7605 __ tst(r0, Operand(kSmiTagMask));
7606 __ b(eq, &not_strings);
7607 __ CompareObjectType(r0, r2, r2, FIRST_NONSTRING_TYPE);
7608 __ b(ge, &not_strings);
7609
7610 // Only second argument is a string.
Steve Blockd0582a62009-12-15 09:54:21 +00007611 __ InvokeBuiltin(Builtins::STRING_ADD_RIGHT, JUMP_JS);
7612
7613 __ bind(&not_strings);
7614 }
7615
Steve Blocka7e24c12009-10-30 11:49:00 +00007616 __ InvokeBuiltin(builtin, JUMP_JS); // Tail call. No return.
Steve Blocka7e24c12009-10-30 11:49:00 +00007617}
7618
7619
7620// Tries to get a signed int32 out of a double precision floating point heap
7621// number. Rounds towards 0. Fastest for doubles that are in the ranges
7622// -0x7fffffff to -0x40000000 or 0x40000000 to 0x7fffffff. This corresponds
7623// almost to the range of signed int32 values that are not Smis. Jumps to the
7624// label 'slow' if the double isn't in the range -0x80000000.0 to 0x80000000.0
7625// (excluding the endpoints).
7626static void GetInt32(MacroAssembler* masm,
7627 Register source,
7628 Register dest,
7629 Register scratch,
7630 Register scratch2,
7631 Label* slow) {
7632 Label right_exponent, done;
7633 // Get exponent word.
7634 __ ldr(scratch, FieldMemOperand(source, HeapNumber::kExponentOffset));
7635 // Get exponent alone in scratch2.
7636 __ and_(scratch2, scratch, Operand(HeapNumber::kExponentMask));
7637 // Load dest with zero. We use this either for the final shift or
7638 // for the answer.
7639 __ mov(dest, Operand(0));
7640 // Check whether the exponent matches a 32 bit signed int that is not a Smi.
7641 // A non-Smi integer is 1.xxx * 2^30 so the exponent is 30 (biased). This is
7642 // the exponent that we are fastest at and also the highest exponent we can
7643 // handle here.
7644 const uint32_t non_smi_exponent =
7645 (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift;
7646 __ cmp(scratch2, Operand(non_smi_exponent));
7647 // If we have a match of the int32-but-not-Smi exponent then skip some logic.
7648 __ b(eq, &right_exponent);
7649 // If the exponent is higher than that then go to slow case. This catches
7650 // numbers that don't fit in a signed int32, infinities and NaNs.
7651 __ b(gt, slow);
7652
7653 // We know the exponent is smaller than 30 (biased). If it is less than
7654 // 0 (biased) then the number is smaller in magnitude than 1.0 * 2^0, i.e.
7655 // it rounds to zero.
7656 const uint32_t zero_exponent =
7657 (HeapNumber::kExponentBias + 0) << HeapNumber::kExponentShift;
7658 __ sub(scratch2, scratch2, Operand(zero_exponent), SetCC);
7659 // Dest already has a Smi zero.
7660 __ b(lt, &done);
Steve Blockd0582a62009-12-15 09:54:21 +00007661 if (!CpuFeatures::IsSupported(VFP3)) {
7662 // We have a shifted exponent between 0 and 30 in scratch2.
7663 __ mov(dest, Operand(scratch2, LSR, HeapNumber::kExponentShift));
7664 // We now have the exponent in dest. Subtract from 30 to get
7665 // how much to shift down.
7666 __ rsb(dest, dest, Operand(30));
7667 }
Steve Blocka7e24c12009-10-30 11:49:00 +00007668 __ bind(&right_exponent);
Steve Blockd0582a62009-12-15 09:54:21 +00007669 if (CpuFeatures::IsSupported(VFP3)) {
7670 CpuFeatures::Scope scope(VFP3);
7671 // ARMv7 VFP3 instructions implementing double precision to integer
7672 // conversion using round to zero.
7673 __ ldr(scratch2, FieldMemOperand(source, HeapNumber::kMantissaOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00007674 __ vmov(d7, scratch2, scratch);
Steve Block6ded16b2010-05-10 14:33:55 +01007675 __ vcvt_s32_f64(s15, d7);
Leon Clarkee46be812010-01-19 14:06:41 +00007676 __ vmov(dest, s15);
Steve Blockd0582a62009-12-15 09:54:21 +00007677 } else {
7678 // Get the top bits of the mantissa.
7679 __ and_(scratch2, scratch, Operand(HeapNumber::kMantissaMask));
7680 // Put back the implicit 1.
7681 __ orr(scratch2, scratch2, Operand(1 << HeapNumber::kExponentShift));
7682 // Shift up the mantissa bits to take up the space the exponent used to
7683 // take. We just orred in the implicit bit so that took care of one and
7684 // we want to leave the sign bit 0 so we subtract 2 bits from the shift
7685 // distance.
7686 const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2;
7687 __ mov(scratch2, Operand(scratch2, LSL, shift_distance));
7688 // Put sign in zero flag.
7689 __ tst(scratch, Operand(HeapNumber::kSignMask));
7690 // Get the second half of the double. For some exponents we don't
7691 // actually need this because the bits get shifted out again, but
7692 // it's probably slower to test than just to do it.
7693 __ ldr(scratch, FieldMemOperand(source, HeapNumber::kMantissaOffset));
7694 // Shift down 22 bits to get the last 10 bits.
7695 __ orr(scratch, scratch2, Operand(scratch, LSR, 32 - shift_distance));
7696 // Move down according to the exponent.
7697 __ mov(dest, Operand(scratch, LSR, dest));
7698 // Fix sign if sign bit was set.
7699 __ rsb(dest, dest, Operand(0), LeaveCC, ne);
7700 }
Steve Blocka7e24c12009-10-30 11:49:00 +00007701 __ bind(&done);
7702}
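
// Illustrative sketch (not part of the stub): the non-VFP truncation above in
// host arithmetic. hi and lo stand for the exponent and mantissa words of the
// heap number that the stub loads into its scratch registers:
//
//   int exponent = ((hi >> 20) & 0x7FF) - 1023;     // unbiased, 0..30 here
//   uint32_t mantissa = (hi & 0xFFFFF) | 0x100000;  // put back the implicit 1
//   uint32_t top = (mantissa << 10) | (lo >> 22);   // align below the sign bit
//   int32_t result = top >> (30 - exponent);        // truncate towards zero
//   if (hi & 0x80000000u) result = -result;         // apply the sign
//
// For 5.0 (hi == 0x40140000, lo == 0) this gives exponent 2, top 0x50000000
// and result 5, matching the VFP vcvt_s32_f64 path.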
7703
Steve Blocka7e24c12009-10-30 11:49:00 +00007704// For bitwise ops where the inputs are not both Smis we try to determine
7705// whether both inputs are either Smis or at least heap numbers that can be
7706// represented by a 32 bit signed value. We truncate towards zero as required
7707// by the ES spec. If this is the case we do the bitwise op and see if the
7708// result is a Smi. If so, great, otherwise we try to find a heap number to
7709// write the answer into (either by allocating or by overwriting).
Steve Block6ded16b2010-05-10 14:33:55 +01007710// On entry the operands are in lhs and rhs. On exit the answer is in r0.
7711void GenericBinaryOpStub::HandleNonSmiBitwiseOp(MacroAssembler* masm,
7712 Register lhs,
7713 Register rhs) {
Steve Blocka7e24c12009-10-30 11:49:00 +00007714 Label slow, result_not_a_smi;
Steve Block6ded16b2010-05-10 14:33:55 +01007715 Label rhs_is_smi, lhs_is_smi;
7716 Label done_checking_rhs, done_checking_lhs;
Steve Blocka7e24c12009-10-30 11:49:00 +00007717
Steve Block6ded16b2010-05-10 14:33:55 +01007718 __ tst(lhs, Operand(kSmiTagMask));
7719 __ b(eq, &lhs_is_smi); // It's a Smi so don't check it's a heap number.
7720 __ CompareObjectType(lhs, r4, r4, HEAP_NUMBER_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00007721 __ b(ne, &slow);
Steve Block6ded16b2010-05-10 14:33:55 +01007722 GetInt32(masm, lhs, r3, r5, r4, &slow);
7723 __ jmp(&done_checking_lhs);
7724 __ bind(&lhs_is_smi);
7725 __ mov(r3, Operand(lhs, ASR, 1));
7726 __ bind(&done_checking_lhs);
Steve Blocka7e24c12009-10-30 11:49:00 +00007727
Steve Block6ded16b2010-05-10 14:33:55 +01007728 __ tst(rhs, Operand(kSmiTagMask));
7729 __ b(eq, &rhs_is_smi); // It's a Smi so don't check it's a heap number.
7730 __ CompareObjectType(rhs, r4, r4, HEAP_NUMBER_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00007731 __ b(ne, &slow);
Steve Block6ded16b2010-05-10 14:33:55 +01007732 GetInt32(masm, rhs, r2, r5, r4, &slow);
7733 __ jmp(&done_checking_rhs);
7734 __ bind(&rhs_is_smi);
7735 __ mov(r2, Operand(rhs, ASR, 1));
7736 __ bind(&done_checking_rhs);
7737
7738 ASSERT(((lhs.is(r0) && rhs.is(r1)) || (lhs.is(r1) && rhs.is(r0))));
Steve Blocka7e24c12009-10-30 11:49:00 +00007739
7740 // r0 and r1: Original operands (Smi or heap numbers).
7741 // r2 and r3: Signed int32 operands.
7742 switch (op_) {
7743 case Token::BIT_OR: __ orr(r2, r2, Operand(r3)); break;
7744 case Token::BIT_XOR: __ eor(r2, r2, Operand(r3)); break;
7745 case Token::BIT_AND: __ and_(r2, r2, Operand(r3)); break;
7746 case Token::SAR:
7747 // Use only the 5 least significant bits of the shift count.
7748 __ and_(r2, r2, Operand(0x1f));
7749 __ mov(r2, Operand(r3, ASR, r2));
7750 break;
7751 case Token::SHR:
7752 // Use only the 5 least significant bits of the shift count.
7753 __ and_(r2, r2, Operand(0x1f));
7754 __ mov(r2, Operand(r3, LSR, r2), SetCC);
7755 // SHR is special because it is required to produce a positive answer.
7756 // The code below for writing into heap numbers isn't capable of writing
7757 // the register as an unsigned int so we go to slow case if we hit this
7758 // case.
7759 __ b(mi, &slow);
7760 break;
7761 case Token::SHL:
7762 // Use only the 5 least significant bits of the shift count.
7763 __ and_(r2, r2, Operand(0x1f));
7764 __ mov(r2, Operand(r3, LSL, r2));
7765 break;
7766 default: UNREACHABLE();
7767 }
7768 // Check that the *signed* result fits in a Smi.
7769 __ add(r3, r2, Operand(0x40000000), SetCC);
7770 __ b(mi, &result_not_a_smi);
7771 __ mov(r0, Operand(r2, LSL, kSmiTagSize));
7772 __ Ret();
7773
7774 Label have_to_allocate, got_a_heap_number;
7775 __ bind(&result_not_a_smi);
7776 switch (mode_) {
7777 case OVERWRITE_RIGHT: {
Steve Block6ded16b2010-05-10 14:33:55 +01007778 __ tst(rhs, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00007779 __ b(eq, &have_to_allocate);
Steve Block6ded16b2010-05-10 14:33:55 +01007780 __ mov(r5, Operand(rhs));
Steve Blocka7e24c12009-10-30 11:49:00 +00007781 break;
7782 }
7783 case OVERWRITE_LEFT: {
Steve Block6ded16b2010-05-10 14:33:55 +01007784 __ tst(lhs, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00007785 __ b(eq, &have_to_allocate);
Steve Block6ded16b2010-05-10 14:33:55 +01007786 __ mov(r5, Operand(lhs));
Steve Blocka7e24c12009-10-30 11:49:00 +00007787 break;
7788 }
7789 case NO_OVERWRITE: {
7790 // Get a new heap number in r5. r6 and r7 are scratch.
Steve Block6ded16b2010-05-10 14:33:55 +01007791 __ AllocateHeapNumber(r5, r6, r7, &slow);
Steve Blocka7e24c12009-10-30 11:49:00 +00007792 }
7793 default: break;
7794 }
7795 __ bind(&got_a_heap_number);
7796 // r2: Answer as signed int32.
7797 // r5: Heap number to write answer into.
7798
7799 // Nothing can go wrong now, so move the heap number to r0, which is the
7800 // result.
7801 __ mov(r0, Operand(r5));
7802
7803 // Tail call that writes the int32 in r2 to the heap number in r0, using
7804 // r3 as scratch. r0 is preserved and returned.
7805 WriteInt32ToHeapNumberStub stub(r2, r0, r3);
7806 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
7807
7808 if (mode_ != NO_OVERWRITE) {
7809 __ bind(&have_to_allocate);
7810 // Get a new heap number in r5. r6 and r7 are scratch.
Steve Block6ded16b2010-05-10 14:33:55 +01007811 __ AllocateHeapNumber(r5, r6, r7, &slow);
Steve Blocka7e24c12009-10-30 11:49:00 +00007812 __ jmp(&got_a_heap_number);
7813 }
7814
7815 // If all else failed then we go to the runtime system.
7816 __ bind(&slow);
Steve Block6ded16b2010-05-10 14:33:55 +01007817 __ Push(lhs, rhs); // Restore stack.
Steve Blocka7e24c12009-10-30 11:49:00 +00007818 switch (op_) {
7819 case Token::BIT_OR:
7820 __ InvokeBuiltin(Builtins::BIT_OR, JUMP_JS);
7821 break;
7822 case Token::BIT_AND:
7823 __ InvokeBuiltin(Builtins::BIT_AND, JUMP_JS);
7824 break;
7825 case Token::BIT_XOR:
7826 __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_JS);
7827 break;
7828 case Token::SAR:
7829 __ InvokeBuiltin(Builtins::SAR, JUMP_JS);
7830 break;
7831 case Token::SHR:
7832 __ InvokeBuiltin(Builtins::SHR, JUMP_JS);
7833 break;
7834 case Token::SHL:
7835 __ InvokeBuiltin(Builtins::SHL, JUMP_JS);
7836 break;
7837 default:
7838 UNREACHABLE();
7839 }
7840}
7841
7842
7843// Can we multiply by x with at most two shifts and an add?
7844// This answers yes to all integers from 2 to 10.
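// A few illustrative decompositions (a sketch of what MultiplyByKnownInt
// below emits for such factors):
//   x * 8  -> x << 3                 (power of two: one shift)
//   x * 10 -> (x + (x << 2)) << 1    (two bits set: add, then shift)
//   x * 7  -> (x << 3) - x           (one less than a power of two)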
7845static bool IsEasyToMultiplyBy(int x) {
7846 if (x < 2) return false; // Avoid special cases.
7847 if (x > (Smi::kMaxValue + 1) >> 2) return false; // Almost always overflows.
7848 if (IsPowerOf2(x)) return true; // Simple shift.
7849 if (PopCountLessThanEqual2(x)) return true; // Shift and add and shift.
7850 if (IsPowerOf2(x + 1)) return true; // Patterns like 11111.
7851 return false;
7852}
7853
7854
7855// Can multiply by anything that IsEasyToMultiplyBy returns true for.
7856// Source and destination may be the same register. This routine does
7857// not set carry and overflow the way a mul instruction would.
7858static void MultiplyByKnownInt(MacroAssembler* masm,
7859 Register source,
7860 Register destination,
7861 int known_int) {
7862 if (IsPowerOf2(known_int)) {
7863 __ mov(destination, Operand(source, LSL, BitPosition(known_int)));
7864 } else if (PopCountLessThanEqual2(known_int)) {
7865 int first_bit = BitPosition(known_int);
7866 int second_bit = BitPosition(known_int ^ (1 << first_bit));
7867 __ add(destination, source, Operand(source, LSL, second_bit - first_bit));
7868 if (first_bit != 0) {
7869 __ mov(destination, Operand(destination, LSL, first_bit));
7870 }
7871 } else {
7872 ASSERT(IsPowerOf2(known_int + 1)); // Patterns like 1111.
7873 int the_bit = BitPosition(known_int + 1);
7874 __ rsb(destination, source, Operand(source, LSL, the_bit));
7875 }
7876}
7877
7878
7879// This function (as opposed to MultiplyByKnownInt) takes the known int in
7880// a register for the cases where it doesn't know a good trick, and may deliver
7881// a result that needs shifting.
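// For instance (a sketch of the case below): with known_int == 10 the helper
// emits result = source + (source << 2), i.e. source * 5, and reports
// *required_shift == 2; the caller's extra shift left by 2 then yields
// source * 20, which is the Smi-tagged form of source * 10.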
7882static void MultiplyByKnownInt2(
7883 MacroAssembler* masm,
7884 Register result,
7885 Register source,
7886 Register known_int_register, // Smi tagged.
7887 int known_int,
7888 int* required_shift) { // Including Smi tag shift
7889 switch (known_int) {
7890 case 3:
7891 __ add(result, source, Operand(source, LSL, 1));
7892 *required_shift = 1;
7893 break;
7894 case 5:
7895 __ add(result, source, Operand(source, LSL, 2));
7896 *required_shift = 1;
7897 break;
7898 case 6:
7899 __ add(result, source, Operand(source, LSL, 1));
7900 *required_shift = 2;
7901 break;
7902 case 7:
7903 __ rsb(result, source, Operand(source, LSL, 3));
7904 *required_shift = 1;
7905 break;
7906 case 9:
7907 __ add(result, source, Operand(source, LSL, 3));
7908 *required_shift = 1;
7909 break;
7910 case 10:
7911 __ add(result, source, Operand(source, LSL, 2));
7912 *required_shift = 2;
7913 break;
7914 default:
7915 ASSERT(!IsPowerOf2(known_int)); // That would be very inefficient.
7916 __ mul(result, source, known_int_register);
7917 *required_shift = 0;
7918 }
7919}
7920
7921
Leon Clarkee46be812010-01-19 14:06:41 +00007922const char* GenericBinaryOpStub::GetName() {
7923 if (name_ != NULL) return name_;
7924 const int len = 100;
7925 name_ = Bootstrapper::AllocateAutoDeletedArray(len);
7926 if (name_ == NULL) return "OOM";
7927 const char* op_name = Token::Name(op_);
7928 const char* overwrite_name;
7929 switch (mode_) {
7930 case NO_OVERWRITE: overwrite_name = "Alloc"; break;
7931 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
7932 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
7933 default: overwrite_name = "UnknownOverwrite"; break;
7934 }
7935
7936 OS::SNPrintF(Vector<char>(name_, len),
7937 "GenericBinaryOpStub_%s_%s%s",
7938 op_name,
7939 overwrite_name,
7940               specialized_on_rhs_ ? "_ConstantRhs" : "");
7941 return name_;
7942}
7943
7944
Andrei Popescu31002712010-02-23 13:46:05 +00007945
Steve Blocka7e24c12009-10-30 11:49:00 +00007946void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
Steve Block6ded16b2010-05-10 14:33:55 +01007947 // lhs_ : x
7948 // rhs_ : y
7949 // r0 : result
Steve Blocka7e24c12009-10-30 11:49:00 +00007950
Steve Block6ded16b2010-05-10 14:33:55 +01007951 Register result = r0;
7952 Register lhs = lhs_;
7953 Register rhs = rhs_;
7954
7955 // This code can't cope with other register allocations yet.
7956 ASSERT(result.is(r0) &&
7957 ((lhs.is(r0) && rhs.is(r1)) ||
7958 (lhs.is(r1) && rhs.is(r0))));
7959
7960 Register smi_test_reg = VirtualFrame::scratch0();
7961 Register scratch = VirtualFrame::scratch1();
7962
7963 // All ops need to know whether we are dealing with two Smis. Set up
7964 // smi_test_reg to tell us that.
7965 if (ShouldGenerateSmiCode()) {
7966 __ orr(smi_test_reg, lhs, Operand(rhs));
7967 }
Steve Blocka7e24c12009-10-30 11:49:00 +00007968
7969 switch (op_) {
7970 case Token::ADD: {
7971 Label not_smi;
7972 // Fast path.
Steve Block6ded16b2010-05-10 14:33:55 +01007973 if (ShouldGenerateSmiCode()) {
7974 ASSERT(kSmiTag == 0); // Adjust code below.
7975 __ tst(smi_test_reg, Operand(kSmiTagMask));
7976 __ b(ne, &not_smi);
7977 __ add(r0, r1, Operand(r0), SetCC); // Add y optimistically.
7978 // Return if no overflow.
7979 __ Ret(vc);
7980 __ sub(r0, r0, Operand(r1)); // Revert optimistic add.
7981 }
7982 HandleBinaryOpSlowCases(masm, &not_smi, lhs, rhs, Builtins::ADD);
Steve Blocka7e24c12009-10-30 11:49:00 +00007983 break;
7984 }
7985
7986 case Token::SUB: {
7987 Label not_smi;
7988 // Fast path.
Steve Block6ded16b2010-05-10 14:33:55 +01007989 if (ShouldGenerateSmiCode()) {
7990 ASSERT(kSmiTag == 0); // Adjust code below.
7991 __ tst(smi_test_reg, Operand(kSmiTagMask));
7992 __ b(ne, &not_smi);
7993 if (lhs.is(r1)) {
7994 __ sub(r0, r1, Operand(r0), SetCC); // Subtract y optimistically.
7995 // Return if no overflow.
7996 __ Ret(vc);
7997 __ sub(r0, r1, Operand(r0)); // Revert optimistic subtract.
7998 } else {
7999 __ sub(r0, r0, Operand(r1), SetCC); // Subtract y optimistically.
8000 // Return if no overflow.
8001 __ Ret(vc);
8002 __ add(r0, r0, Operand(r1)); // Revert optimistic subtract.
8003 }
8004 }
8005 HandleBinaryOpSlowCases(masm, &not_smi, lhs, rhs, Builtins::SUB);
Steve Blocka7e24c12009-10-30 11:49:00 +00008006 break;
8007 }
8008
8009 case Token::MUL: {
8010 Label not_smi, slow;
Steve Block6ded16b2010-05-10 14:33:55 +01008011 if (ShouldGenerateSmiCode()) {
8012 ASSERT(kSmiTag == 0); // adjust code below
8013 __ tst(smi_test_reg, Operand(kSmiTagMask));
8014 Register scratch2 = smi_test_reg;
8015 smi_test_reg = no_reg;
8016 __ b(ne, &not_smi);
8017 // Remove tag from one operand (but keep sign), so that result is Smi.
8018 __ mov(ip, Operand(rhs, ASR, kSmiTagSize));
8019 // Do multiplication
8020 // scratch = lower 32 bits of ip * lhs.
8021 __ smull(scratch, scratch2, lhs, ip);
8022 // Go slow on overflows (overflow bit is not set).
8023 __ mov(ip, Operand(scratch, ASR, 31));
8024 // No overflow if higher 33 bits are identical.
8025 __ cmp(ip, Operand(scratch2));
8026 __ b(ne, &slow);
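        // Sketch of the check above: multiplying Smi-tagged lhs 0x40000000 by
        // untagged rhs 2 gives a 64-bit product with high word 0 and low word
        // 0x80000000; the low word ASR 31 is 0xffffffff, which differs from
        // the high word, so the tagged result would overflow and we go slow.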
8027 // Go slow on zero result to handle -0.
8028 __ tst(scratch, Operand(scratch));
8029 __ mov(result, Operand(scratch), LeaveCC, ne);
8030 __ Ret(ne);
8031 // We need -0 if we were multiplying a negative number with 0 to get 0.
8032 // We know one of them was zero.
8033 __ add(scratch2, rhs, Operand(lhs), SetCC);
8034 __ mov(result, Operand(Smi::FromInt(0)), LeaveCC, pl);
8035 __ Ret(pl); // Return Smi 0 if the non-zero one was positive.
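        // For example: 0 * 3 leaves a non-negative operand sum, so Smi 0 is
        // returned above, while -4 * 0 leaves a negative sum and falls
        // through below.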
8036 // Slow case. We fall through here if we multiplied a negative number
8037 // with 0, because that would mean we should produce -0.
8038 __ bind(&slow);
8039 }
8040 HandleBinaryOpSlowCases(masm, &not_smi, lhs, rhs, Builtins::MUL);
Steve Blocka7e24c12009-10-30 11:49:00 +00008041 break;
8042 }
8043
8044 case Token::DIV:
8045 case Token::MOD: {
8046 Label not_smi;
Steve Block6ded16b2010-05-10 14:33:55 +01008047 if (ShouldGenerateSmiCode() && specialized_on_rhs_) {
Steve Blocka7e24c12009-10-30 11:49:00 +00008048 Label smi_is_unsuitable;
Steve Block6ded16b2010-05-10 14:33:55 +01008049 __ BranchOnNotSmi(lhs, &not_smi);
Steve Blocka7e24c12009-10-30 11:49:00 +00008050 if (IsPowerOf2(constant_rhs_)) {
8051 if (op_ == Token::MOD) {
Steve Block6ded16b2010-05-10 14:33:55 +01008052 __ and_(rhs,
8053 lhs,
Steve Blocka7e24c12009-10-30 11:49:00 +00008054 Operand(0x80000000u | ((constant_rhs_ << kSmiTagSize) - 1)),
8055 SetCC);
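          // Sketch for constant_rhs_ == 4: the mask is 0x80000007, so the
          // and keeps the sign bit plus the three low bits of the Smi-tagged
          // lhs, i.e. the Smi-tagged remainder of dividing by 4 (for a
          // non-negative lhs).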
8056 // We now have the answer, but if the input was negative we also
8057 // have the sign bit. Our work is done if the result is
8058 // positive or zero:
Steve Block6ded16b2010-05-10 14:33:55 +01008059 if (!rhs.is(r0)) {
8060 __ mov(r0, rhs, LeaveCC, pl);
8061 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008062 __ Ret(pl);
8063 // A mod of a negative left hand side must return a negative number.
8064 // Unfortunately if the answer is 0 then we must return -0. And we
Steve Block6ded16b2010-05-10 14:33:55 +01008065 // already optimistically trashed rhs so we may need to restore it.
8066 __ eor(rhs, rhs, Operand(0x80000000u), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00008067 // Next two instructions are conditional on the answer being -0.
Steve Block6ded16b2010-05-10 14:33:55 +01008068 __ mov(rhs, Operand(Smi::FromInt(constant_rhs_)), LeaveCC, eq);
Steve Blocka7e24c12009-10-30 11:49:00 +00008069 __ b(eq, &smi_is_unsuitable);
8070 // We need to subtract the dividend. Eg. -3 % 4 == -3.
Steve Block6ded16b2010-05-10 14:33:55 +01008071 __ sub(result, rhs, Operand(Smi::FromInt(constant_rhs_)));
Steve Blocka7e24c12009-10-30 11:49:00 +00008072 } else {
8073 ASSERT(op_ == Token::DIV);
Steve Block6ded16b2010-05-10 14:33:55 +01008074 __ tst(lhs,
Steve Blocka7e24c12009-10-30 11:49:00 +00008075 Operand(0x80000000u | ((constant_rhs_ << kSmiTagSize) - 1)));
8076 __ b(ne, &smi_is_unsuitable); // Go slow on negative or remainder.
8077 int shift = 0;
8078 int d = constant_rhs_;
8079 while ((d & 1) == 0) {
8080 d >>= 1;
8081 shift++;
8082 }
Steve Block6ded16b2010-05-10 14:33:55 +01008083 __ mov(r0, Operand(lhs, LSR, shift));
Steve Blocka7e24c12009-10-30 11:49:00 +00008084 __ bic(r0, r0, Operand(kSmiTagMask));
8085 }
8086 } else {
8087 // Not a power of 2.
Steve Block6ded16b2010-05-10 14:33:55 +01008088 __ tst(lhs, Operand(0x80000000u));
Steve Blocka7e24c12009-10-30 11:49:00 +00008089 __ b(ne, &smi_is_unsuitable);
8090 // Find a fixed point reciprocal of the divisor so we can divide by
8091 // multiplying.
8092 double divisor = 1.0 / constant_rhs_;
8093 int shift = 32;
8094 double scale = 4294967296.0; // 1 << 32.
8095 uint32_t mul;
8096 // Maximise the precision of the fixed point reciprocal.
8097 while (true) {
8098 mul = static_cast<uint32_t>(scale * divisor);
8099 if (mul >= 0x7fffffff) break;
8100 scale *= 2.0;
8101 shift++;
8102 }
8103 mul++;
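        // Worked example (sketch): for constant_rhs_ == 5 the loop ends with
        // shift == 34 and mul == 0xCCCCCCCD, so below the untagged quotient
        // becomes (lhs * mul) >> 35; the extra bit of shift absorbs the Smi
        // tag on lhs.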
Steve Block6ded16b2010-05-10 14:33:55 +01008104 Register scratch2 = smi_test_reg;
8105 smi_test_reg = no_reg;
8106 __ mov(scratch2, Operand(mul));
8107 __ umull(scratch, scratch2, scratch2, lhs);
8108 __ mov(scratch2, Operand(scratch2, LSR, shift - 31));
8109 // scratch2 is lhs / rhs. scratch2 is not Smi tagged.
8110 // rhs is still the known rhs. rhs is Smi tagged.
8111        // lhs is still the unknown lhs. lhs is Smi tagged.
8112 int required_scratch_shift = 0; // Including the Smi tag shift of 1.
8113 // scratch = scratch2 * rhs.
Steve Blocka7e24c12009-10-30 11:49:00 +00008114 MultiplyByKnownInt2(masm,
Steve Block6ded16b2010-05-10 14:33:55 +01008115 scratch,
8116 scratch2,
8117 rhs,
Steve Blocka7e24c12009-10-30 11:49:00 +00008118 constant_rhs_,
Steve Block6ded16b2010-05-10 14:33:55 +01008119 &required_scratch_shift);
8120 // scratch << required_scratch_shift is now the Smi tagged rhs *
8121 // (lhs / rhs) where / indicates integer division.
Steve Blocka7e24c12009-10-30 11:49:00 +00008122 if (op_ == Token::DIV) {
Steve Block6ded16b2010-05-10 14:33:55 +01008123 __ cmp(lhs, Operand(scratch, LSL, required_scratch_shift));
Steve Blocka7e24c12009-10-30 11:49:00 +00008124 __ b(ne, &smi_is_unsuitable); // There was a remainder.
Steve Block6ded16b2010-05-10 14:33:55 +01008125 __ mov(result, Operand(scratch2, LSL, kSmiTagSize));
Steve Blocka7e24c12009-10-30 11:49:00 +00008126 } else {
8127 ASSERT(op_ == Token::MOD);
Steve Block6ded16b2010-05-10 14:33:55 +01008128 __ sub(result, lhs, Operand(scratch, LSL, required_scratch_shift));
Steve Blocka7e24c12009-10-30 11:49:00 +00008129 }
8130 }
8131 __ Ret();
8132 __ bind(&smi_is_unsuitable);
Steve Blocka7e24c12009-10-30 11:49:00 +00008133 }
Steve Block6ded16b2010-05-10 14:33:55 +01008134 HandleBinaryOpSlowCases(
8135 masm,
8136 &not_smi,
8137 lhs,
8138 rhs,
8139 op_ == Token::MOD ? Builtins::MOD : Builtins::DIV);
Steve Blocka7e24c12009-10-30 11:49:00 +00008140 break;
8141 }
8142
8143 case Token::BIT_OR:
8144 case Token::BIT_AND:
8145 case Token::BIT_XOR:
8146 case Token::SAR:
8147 case Token::SHR:
8148 case Token::SHL: {
8149 Label slow;
8150 ASSERT(kSmiTag == 0); // adjust code below
Steve Block6ded16b2010-05-10 14:33:55 +01008151 __ tst(smi_test_reg, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00008152 __ b(ne, &slow);
Steve Block6ded16b2010-05-10 14:33:55 +01008153 Register scratch2 = smi_test_reg;
8154 smi_test_reg = no_reg;
Steve Blocka7e24c12009-10-30 11:49:00 +00008155 switch (op_) {
Steve Block6ded16b2010-05-10 14:33:55 +01008156 case Token::BIT_OR: __ orr(result, rhs, Operand(lhs)); break;
8157 case Token::BIT_AND: __ and_(result, rhs, Operand(lhs)); break;
8158 case Token::BIT_XOR: __ eor(result, rhs, Operand(lhs)); break;
Steve Blocka7e24c12009-10-30 11:49:00 +00008159 case Token::SAR:
8160 // Remove tags from right operand.
Steve Block6ded16b2010-05-10 14:33:55 +01008161 __ GetLeastBitsFromSmi(scratch2, rhs, 5);
8162 __ mov(result, Operand(lhs, ASR, scratch2));
Steve Blocka7e24c12009-10-30 11:49:00 +00008163 // Smi tag result.
Steve Block6ded16b2010-05-10 14:33:55 +01008164 __ bic(result, result, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00008165 break;
8166 case Token::SHR:
8167 // Remove tags from operands. We can't do this on a 31 bit number
8168 // because then the 0s get shifted into bit 30 instead of bit 31.
Steve Block6ded16b2010-05-10 14:33:55 +01008169 __ mov(scratch, Operand(lhs, ASR, kSmiTagSize)); // x
8170 __ GetLeastBitsFromSmi(scratch2, rhs, 5);
8171 __ mov(scratch, Operand(scratch, LSR, scratch2));
Steve Blocka7e24c12009-10-30 11:49:00 +00008172 // Unsigned shift is not allowed to produce a negative number, so
8173 // check the sign bit and the sign bit after Smi tagging.
Steve Block6ded16b2010-05-10 14:33:55 +01008174 __ tst(scratch, Operand(0xc0000000));
Steve Blocka7e24c12009-10-30 11:49:00 +00008175 __ b(ne, &slow);
8176 // Smi tag result.
Steve Block6ded16b2010-05-10 14:33:55 +01008177 __ mov(result, Operand(scratch, LSL, kSmiTagSize));
Steve Blocka7e24c12009-10-30 11:49:00 +00008178 break;
8179 case Token::SHL:
8180 // Remove tags from operands.
Steve Block6ded16b2010-05-10 14:33:55 +01008181 __ mov(scratch, Operand(lhs, ASR, kSmiTagSize)); // x
8182 __ GetLeastBitsFromSmi(scratch2, rhs, 5);
8183 __ mov(scratch, Operand(scratch, LSL, scratch2));
Steve Blocka7e24c12009-10-30 11:49:00 +00008184 // Check that the signed result fits in a Smi.
Steve Block6ded16b2010-05-10 14:33:55 +01008185 __ add(scratch2, scratch, Operand(0x40000000), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00008186 __ b(mi, &slow);
Steve Block6ded16b2010-05-10 14:33:55 +01008187 __ mov(result, Operand(scratch, LSL, kSmiTagSize));
Steve Blocka7e24c12009-10-30 11:49:00 +00008188 break;
8189 default: UNREACHABLE();
8190 }
8191 __ Ret();
8192 __ bind(&slow);
Steve Block6ded16b2010-05-10 14:33:55 +01008193 HandleNonSmiBitwiseOp(masm, lhs, rhs);
Steve Blocka7e24c12009-10-30 11:49:00 +00008194 break;
8195 }
8196
8197 default: UNREACHABLE();
8198 }
8199 // This code should be unreachable.
8200 __ stop("Unreachable");
Steve Block6ded16b2010-05-10 14:33:55 +01008201
8202 // Generate an unreachable reference to the DEFAULT stub so that it can be
8203 // found at the end of this stub when clearing ICs at GC.
8204 // TODO(kaznacheev): Check performance impact and get rid of this.
8205 if (runtime_operands_type_ != BinaryOpIC::DEFAULT) {
8206 GenericBinaryOpStub uninit(MinorKey(), BinaryOpIC::DEFAULT);
8207 __ CallStub(&uninit);
8208 }
8209}
8210
8211
8212void GenericBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
8213 Label get_result;
8214
8215 __ Push(r1, r0);
8216
8217 // Internal frame is necessary to handle exceptions properly.
8218 __ EnterInternalFrame();
8219 // Call the stub proper to get the result in r0.
8220 __ Call(&get_result);
8221 __ LeaveInternalFrame();
8222
8223 __ push(r0);
8224
8225 __ mov(r0, Operand(Smi::FromInt(MinorKey())));
8226 __ push(r0);
8227 __ mov(r0, Operand(Smi::FromInt(op_)));
8228 __ push(r0);
8229 __ mov(r0, Operand(Smi::FromInt(runtime_operands_type_)));
8230 __ push(r0);
8231
8232 __ TailCallExternalReference(
8233 ExternalReference(IC_Utility(IC::kBinaryOp_Patch)),
8234 6,
8235 1);
8236
8237 // The entry point for the result calculation is assumed to be immediately
8238 // after this sequence.
8239 __ bind(&get_result);
8240}
8241
8242
8243Handle<Code> GetBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info) {
8244 GenericBinaryOpStub stub(key, type_info);
8245 return stub.GetCode();
Steve Blocka7e24c12009-10-30 11:49:00 +00008246}
8247
8248
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01008249void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
8250  // Argument is a number and is on the stack and in r0.
8251 Label runtime_call;
8252 Label input_not_smi;
8253 Label loaded;
8254
8255 if (CpuFeatures::IsSupported(VFP3)) {
8256 // Load argument and check if it is a smi.
8257 __ BranchOnNotSmi(r0, &input_not_smi);
8258
8259 CpuFeatures::Scope scope(VFP3);
8260 // Input is a smi. Convert to double and load the low and high words
8261 // of the double into r2, r3.
8262 __ IntegerToDoubleConversionWithVFP3(r0, r3, r2);
8263 __ b(&loaded);
8264
8265 __ bind(&input_not_smi);
8266 // Check if input is a HeapNumber.
8267 __ CheckMap(r0,
8268 r1,
8269 Heap::kHeapNumberMapRootIndex,
8270 &runtime_call,
8271 true);
8272 // Input is a HeapNumber. Load it to a double register and store the
8273 // low and high words into r2, r3.
8274 __ Ldrd(r2, r3, FieldMemOperand(r0, HeapNumber::kValueOffset));
8275
8276 __ bind(&loaded);
8277 // r2 = low 32 bits of double value
8278 // r3 = high 32 bits of double value
8279 // Compute hash:
8280 // h = (low ^ high); h ^= h >> 16; h ^= h >> 8; h = h & (cacheSize - 1);
8281 __ eor(r1, r2, Operand(r3));
8282 __ eor(r1, r1, Operand(r1, LSR, 16));
8283 __ eor(r1, r1, Operand(r1, LSR, 8));
8284 ASSERT(IsPowerOf2(TranscendentalCache::kCacheSize));
8285 if (CpuFeatures::IsSupported(ARMv7)) {
8286 const int kTranscendentalCacheSizeBits = 9;
8287 ASSERT_EQ(1 << kTranscendentalCacheSizeBits,
8288 TranscendentalCache::kCacheSize);
8289 __ ubfx(r1, r1, 0, kTranscendentalCacheSizeBits);
8290 } else {
8291 __ and_(r1, r1, Operand(TranscendentalCache::kCacheSize - 1));
8292 }
8293
8294 // r2 = low 32 bits of double value.
8295 // r3 = high 32 bits of double value.
8296 // r1 = TranscendentalCache::hash(double value).
8297 __ mov(r0,
8298 Operand(ExternalReference::transcendental_cache_array_address()));
8299 // r0 points to cache array.
8300 __ ldr(r0, MemOperand(r0, type_ * sizeof(TranscendentalCache::caches_[0])));
8301 // r0 points to the cache for the type type_.
8302 // If NULL, the cache hasn't been initialized yet, so go through runtime.
8303 __ cmp(r0, Operand(0));
8304 __ b(eq, &runtime_call);
8305
8306#ifdef DEBUG
8307 // Check that the layout of cache elements match expectations.
8308 { TranscendentalCache::Element test_elem[2];
8309 char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
8310 char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
8311 char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
8312 char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
8313 char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
8314    CHECK_EQ(12, elem2_start - elem_start); // Two uint32_t's and a pointer.
8315 CHECK_EQ(0, elem_in0 - elem_start);
8316 CHECK_EQ(kIntSize, elem_in1 - elem_start);
8317 CHECK_EQ(2 * kIntSize, elem_out - elem_start);
8318 }
8319#endif
8320
8321 // Find the address of the r1'st entry in the cache, i.e., &r0[r1*12].
8322 __ add(r1, r1, Operand(r1, LSL, 1));
8323 __ add(r0, r0, Operand(r1, LSL, 2));
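    // (Arithmetic sketch: r1 + (r1 << 1) is hash * 3, and adding that shifted
    // left by 2 to r0 gives base + hash * 12, the byte offset of the 12-byte
    // {in[0], in[1], output} element checked above.)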
8324 // Check if cache matches: Double value is stored in uint32_t[2] array.
8325 __ ldm(ia, r0, r4.bit()| r5.bit() | r6.bit());
8326 __ cmp(r2, r4);
8327 __ b(ne, &runtime_call);
8328 __ cmp(r3, r5);
8329 __ b(ne, &runtime_call);
8330 // Cache hit. Load result, pop argument and return.
8331 __ mov(r0, Operand(r6));
8332 __ pop();
8333 __ Ret();
8334 }
8335
8336 __ bind(&runtime_call);
8337 __ TailCallExternalReference(ExternalReference(RuntimeFunction()), 1, 1);
8338}
8339
8340
8341Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
8342 switch (type_) {
8343 // Add more cases when necessary.
8344 case TranscendentalCache::SIN: return Runtime::kMath_sin;
8345 case TranscendentalCache::COS: return Runtime::kMath_cos;
8346 default:
8347 UNIMPLEMENTED();
8348 return Runtime::kAbort;
8349 }
8350}
8351
8352
Steve Blocka7e24c12009-10-30 11:49:00 +00008353void StackCheckStub::Generate(MacroAssembler* masm) {
8354 // Do tail-call to runtime routine. Runtime routines expect at least one
8355 // argument, so give it a Smi.
8356 __ mov(r0, Operand(Smi::FromInt(0)));
8357 __ push(r0);
Steve Block6ded16b2010-05-10 14:33:55 +01008358 __ TailCallRuntime(Runtime::kStackGuard, 1, 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00008359
8360 __ StubReturn(1);
8361}
8362
8363
Leon Clarkee46be812010-01-19 14:06:41 +00008364void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
Leon Clarke4515c472010-02-03 11:58:03 +00008365 Label slow, done;
Leon Clarkee46be812010-01-19 14:06:41 +00008366
Leon Clarke4515c472010-02-03 11:58:03 +00008367 if (op_ == Token::SUB) {
8368 // Check whether the value is a smi.
8369 Label try_float;
8370 __ tst(r0, Operand(kSmiTagMask));
8371 __ b(ne, &try_float);
Steve Blocka7e24c12009-10-30 11:49:00 +00008372
Leon Clarke4515c472010-02-03 11:58:03 +00008373 // Go slow case if the value of the expression is zero
8374 // to make sure that we switch between 0 and -0.
8375 __ cmp(r0, Operand(0));
8376 __ b(eq, &slow);
Steve Blocka7e24c12009-10-30 11:49:00 +00008377
Leon Clarke4515c472010-02-03 11:58:03 +00008378 // The value of the expression is a smi that is not zero. Try
8379 // optimistic subtraction '0 - value'.
8380 __ rsb(r1, r0, Operand(0), SetCC);
8381 __ b(vs, &slow);
Steve Blocka7e24c12009-10-30 11:49:00 +00008382
Leon Clarke4515c472010-02-03 11:58:03 +00008383 __ mov(r0, Operand(r1)); // Set r0 to result.
8384 __ b(&done);
Steve Blocka7e24c12009-10-30 11:49:00 +00008385
Leon Clarke4515c472010-02-03 11:58:03 +00008386 __ bind(&try_float);
8387 __ CompareObjectType(r0, r1, r1, HEAP_NUMBER_TYPE);
8388 __ b(ne, &slow);
8389 // r0 is a heap number. Get a new heap number in r1.
8390 if (overwrite_) {
8391 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
8392 __ eor(r2, r2, Operand(HeapNumber::kSignMask)); // Flip sign.
8393 __ str(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
8394 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01008395 __ AllocateHeapNumber(r1, r2, r3, &slow);
Leon Clarke4515c472010-02-03 11:58:03 +00008396 __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
8397 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
8398 __ str(r3, FieldMemOperand(r1, HeapNumber::kMantissaOffset));
8399 __ eor(r2, r2, Operand(HeapNumber::kSignMask)); // Flip sign.
8400 __ str(r2, FieldMemOperand(r1, HeapNumber::kExponentOffset));
8401 __ mov(r0, Operand(r1));
8402 }
8403 } else if (op_ == Token::BIT_NOT) {
8404 // Check if the operand is a heap number.
8405 __ CompareObjectType(r0, r1, r1, HEAP_NUMBER_TYPE);
8406 __ b(ne, &slow);
8407
8408    // Convert the heap number in r0 to an untagged integer in r1.
8409 GetInt32(masm, r0, r1, r2, r3, &slow);
8410
8411 // Do the bitwise operation (move negated) and check if the result
8412 // fits in a smi.
8413 Label try_float;
8414 __ mvn(r1, Operand(r1));
8415 __ add(r2, r1, Operand(0x40000000), SetCC);
8416 __ b(mi, &try_float);
8417 __ mov(r0, Operand(r1, LSL, kSmiTagSize));
8418 __ b(&done);
8419
8420 __ bind(&try_float);
8421 if (!overwrite_) {
8422 // Allocate a fresh heap number, but don't overwrite r0 until
8423 // we're sure we can do it without going through the slow case
8424 // that needs the value in r0.
Steve Block6ded16b2010-05-10 14:33:55 +01008425 __ AllocateHeapNumber(r2, r3, r4, &slow);
Leon Clarke4515c472010-02-03 11:58:03 +00008426 __ mov(r0, Operand(r2));
8427 }
8428
8429 // WriteInt32ToHeapNumberStub does not trigger GC, so we do not
8430 // have to set up a frame.
8431 WriteInt32ToHeapNumberStub stub(r1, r0, r2);
8432 __ push(lr);
8433 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
8434 __ pop(lr);
8435 } else {
8436 UNIMPLEMENTED();
8437 }
8438
8439 __ bind(&done);
Steve Blocka7e24c12009-10-30 11:49:00 +00008440 __ StubReturn(1);
8441
Leon Clarke4515c472010-02-03 11:58:03 +00008442 // Handle the slow case by jumping to the JavaScript builtin.
Steve Blocka7e24c12009-10-30 11:49:00 +00008443 __ bind(&slow);
8444 __ push(r0);
Leon Clarke4515c472010-02-03 11:58:03 +00008445 switch (op_) {
8446 case Token::SUB:
8447 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_JS);
8448 break;
8449 case Token::BIT_NOT:
8450 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_JS);
8451 break;
8452 default:
8453 UNREACHABLE();
Steve Blocka7e24c12009-10-30 11:49:00 +00008454 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008455}
8456
8457
8458void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
8459 // r0 holds the exception.
8460
8461 // Adjust this code if not the case.
8462 ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
8463
8464 // Drop the sp to the top of the handler.
8465 __ mov(r3, Operand(ExternalReference(Top::k_handler_address)));
8466 __ ldr(sp, MemOperand(r3));
8467
8468 // Restore the next handler and frame pointer, discard handler state.
8469 ASSERT(StackHandlerConstants::kNextOffset == 0);
8470 __ pop(r2);
8471 __ str(r2, MemOperand(r3));
8472 ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
8473 __ ldm(ia_w, sp, r3.bit() | fp.bit()); // r3: discarded state.
8474
8475 // Before returning we restore the context from the frame pointer if
8476 // not NULL. The frame pointer is NULL in the exception handler of a
8477 // JS entry frame.
8478 __ cmp(fp, Operand(0));
8479 // Set cp to NULL if fp is NULL.
8480 __ mov(cp, Operand(0), LeaveCC, eq);
8481 // Restore cp otherwise.
8482 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);
8483#ifdef DEBUG
8484 if (FLAG_debug_code) {
8485 __ mov(lr, Operand(pc));
8486 }
8487#endif
8488 ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
8489 __ pop(pc);
8490}
8491
8492
8493void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
8494 UncatchableExceptionType type) {
8495 // Adjust this code if not the case.
8496 ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
8497
8498 // Drop sp to the top stack handler.
8499 __ mov(r3, Operand(ExternalReference(Top::k_handler_address)));
8500 __ ldr(sp, MemOperand(r3));
8501
8502 // Unwind the handlers until the ENTRY handler is found.
8503 Label loop, done;
8504 __ bind(&loop);
8505 // Load the type of the current stack handler.
8506 const int kStateOffset = StackHandlerConstants::kStateOffset;
8507 __ ldr(r2, MemOperand(sp, kStateOffset));
8508 __ cmp(r2, Operand(StackHandler::ENTRY));
8509 __ b(eq, &done);
8510 // Fetch the next handler in the list.
8511 const int kNextOffset = StackHandlerConstants::kNextOffset;
8512 __ ldr(sp, MemOperand(sp, kNextOffset));
8513 __ jmp(&loop);
8514 __ bind(&done);
8515
8516  // Set the top handler address to the next handler past the current ENTRY handler.
8517 ASSERT(StackHandlerConstants::kNextOffset == 0);
8518 __ pop(r2);
8519 __ str(r2, MemOperand(r3));
8520
8521 if (type == OUT_OF_MEMORY) {
8522 // Set external caught exception to false.
8523 ExternalReference external_caught(Top::k_external_caught_exception_address);
8524 __ mov(r0, Operand(false));
8525 __ mov(r2, Operand(external_caught));
8526 __ str(r0, MemOperand(r2));
8527
8528 // Set pending exception and r0 to out of memory exception.
8529 Failure* out_of_memory = Failure::OutOfMemoryException();
8530 __ mov(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
8531 __ mov(r2, Operand(ExternalReference(Top::k_pending_exception_address)));
8532 __ str(r0, MemOperand(r2));
8533 }
8534
8535 // Stack layout at this point. See also StackHandlerConstants.
8536 // sp -> state (ENTRY)
8537 // fp
8538 // lr
8539
8540 // Discard handler state (r2 is not used) and restore frame pointer.
8541 ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
8542 __ ldm(ia_w, sp, r2.bit() | fp.bit()); // r2: discarded state.
8543 // Before returning we restore the context from the frame pointer if
8544 // not NULL. The frame pointer is NULL in the exception handler of a
8545 // JS entry frame.
8546 __ cmp(fp, Operand(0));
8547 // Set cp to NULL if fp is NULL.
8548 __ mov(cp, Operand(0), LeaveCC, eq);
8549 // Restore cp otherwise.
8550 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);
8551#ifdef DEBUG
8552 if (FLAG_debug_code) {
8553 __ mov(lr, Operand(pc));
8554 }
8555#endif
8556 ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
8557 __ pop(pc);
8558}
8559
8560
8561void CEntryStub::GenerateCore(MacroAssembler* masm,
8562 Label* throw_normal_exception,
8563 Label* throw_termination_exception,
8564 Label* throw_out_of_memory_exception,
Steve Blocka7e24c12009-10-30 11:49:00 +00008565 bool do_gc,
Steve Block6ded16b2010-05-10 14:33:55 +01008566 bool always_allocate,
8567 int frame_alignment_skew) {
Steve Blocka7e24c12009-10-30 11:49:00 +00008568 // r0: result parameter for PerformGC, if any
8569 // r4: number of arguments including receiver (C callee-saved)
8570 // r5: pointer to builtin function (C callee-saved)
8571 // r6: pointer to the first argument (C callee-saved)
8572
8573 if (do_gc) {
8574 // Passing r0.
Steve Block6ded16b2010-05-10 14:33:55 +01008575 __ PrepareCallCFunction(1, r1);
8576 __ CallCFunction(ExternalReference::perform_gc_function(), 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00008577 }
8578
8579 ExternalReference scope_depth =
8580 ExternalReference::heap_always_allocate_scope_depth();
8581 if (always_allocate) {
8582 __ mov(r0, Operand(scope_depth));
8583 __ ldr(r1, MemOperand(r0));
8584 __ add(r1, r1, Operand(1));
8585 __ str(r1, MemOperand(r0));
8586 }
8587
8588 // Call C built-in.
8589 // r0 = argc, r1 = argv
8590 __ mov(r0, Operand(r4));
8591 __ mov(r1, Operand(r6));
8592
Steve Block6ded16b2010-05-10 14:33:55 +01008593 int frame_alignment = MacroAssembler::ActivationFrameAlignment();
8594 int frame_alignment_mask = frame_alignment - 1;
8595#if defined(V8_HOST_ARCH_ARM)
8596 if (FLAG_debug_code) {
8597 if (frame_alignment > kPointerSize) {
8598 Label alignment_as_expected;
8599 ASSERT(IsPowerOf2(frame_alignment));
8600 __ sub(r2, sp, Operand(frame_alignment_skew));
8601 __ tst(r2, Operand(frame_alignment_mask));
8602 __ b(eq, &alignment_as_expected);
8603 // Don't use Check here, as it will call Runtime_Abort re-entering here.
8604 __ stop("Unexpected alignment");
8605 __ bind(&alignment_as_expected);
8606 }
8607 }
8608#endif
8609
8610  // Just before the call (jump) below, lr is pushed, so the actual skew at
8611  // the call site is the current skew plus one word.
8612 int alignment_before_call =
8613 (frame_alignment_skew + kPointerSize) & frame_alignment_mask;
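  // For example, assuming an 8-byte frame_alignment: a skew of -kPointerSize
  // or +kPointerSize makes alignment_before_call 0 and nothing is pushed,
  // while a skew of 0 leaves 4 bytes outstanding and a single filler word is
  // pushed below.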
8614 if (alignment_before_call > 0) {
8615 // Push until the alignment before the call is met.
8616 __ mov(r2, Operand(0));
8617 for (int i = alignment_before_call;
8618 (i & frame_alignment_mask) != 0;
8619 i += kPointerSize) {
8620 __ push(r2);
8621 }
8622 }
8623
Steve Blocka7e24c12009-10-30 11:49:00 +00008624 // TODO(1242173): To let the GC traverse the return address of the exit
8625 // frames, we need to know where the return address is. Right now,
8626 // we push it on the stack to be able to find it again, but we never
8627 // restore from it in case of changes, which makes it impossible to
8628 // support moving the C entry code stub. This should be fixed, but currently
8629 // this is OK because the CEntryStub gets generated so early in the V8 boot
8630 // sequence that it is not moving ever.
Steve Block6ded16b2010-05-10 14:33:55 +01008631 masm->add(lr, pc, Operand(4)); // Compute return address: (pc + 8) + 4
Steve Blocka7e24c12009-10-30 11:49:00 +00008632 masm->push(lr);
8633 masm->Jump(r5);
8634
Steve Block6ded16b2010-05-10 14:33:55 +01008635 // Restore sp back to before aligning the stack.
8636 if (alignment_before_call > 0) {
8637 __ add(sp, sp, Operand(alignment_before_call));
8638 }
8639
Steve Blocka7e24c12009-10-30 11:49:00 +00008640 if (always_allocate) {
8641 // It's okay to clobber r2 and r3 here. Don't mess with r0 and r1
8642 // though (contain the result).
8643 __ mov(r2, Operand(scope_depth));
8644 __ ldr(r3, MemOperand(r2));
8645 __ sub(r3, r3, Operand(1));
8646 __ str(r3, MemOperand(r2));
8647 }
8648
8649 // check for failure result
8650 Label failure_returned;
8651 ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
8652 // Lower 2 bits of r2 are 0 iff r0 has failure tag.
8653 __ add(r2, r0, Operand(1));
8654 __ tst(r2, Operand(kFailureTagMask));
8655 __ b(eq, &failure_returned);
8656
8657 // Exit C frame and return.
8658 // r0:r1: result
8659 // sp: stack pointer
8660 // fp: frame pointer
Leon Clarke4515c472010-02-03 11:58:03 +00008661 __ LeaveExitFrame(mode_);
Steve Blocka7e24c12009-10-30 11:49:00 +00008662
8663 // check if we should retry or throw exception
8664 Label retry;
8665 __ bind(&failure_returned);
8666 ASSERT(Failure::RETRY_AFTER_GC == 0);
8667 __ tst(r0, Operand(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
8668 __ b(eq, &retry);
8669
8670 // Special handling of out of memory exceptions.
8671 Failure* out_of_memory = Failure::OutOfMemoryException();
8672 __ cmp(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
8673 __ b(eq, throw_out_of_memory_exception);
8674
8675 // Retrieve the pending exception and clear the variable.
8676 __ mov(ip, Operand(ExternalReference::the_hole_value_location()));
8677 __ ldr(r3, MemOperand(ip));
8678 __ mov(ip, Operand(ExternalReference(Top::k_pending_exception_address)));
8679 __ ldr(r0, MemOperand(ip));
8680 __ str(r3, MemOperand(ip));
8681
8682 // Special handling of termination exceptions which are uncatchable
8683 // by javascript code.
8684 __ cmp(r0, Operand(Factory::termination_exception()));
8685 __ b(eq, throw_termination_exception);
8686
8687 // Handle normal exception.
8688 __ jmp(throw_normal_exception);
8689
8690 __ bind(&retry); // pass last failure (r0) as parameter (r0) when retrying
8691}
8692
8693
Leon Clarke4515c472010-02-03 11:58:03 +00008694void CEntryStub::Generate(MacroAssembler* masm) {
Steve Blocka7e24c12009-10-30 11:49:00 +00008695 // Called from JavaScript; parameters are on stack as if calling JS function
8696 // r0: number of arguments including receiver
8697 // r1: pointer to builtin function
8698 // fp: frame pointer (restored after C call)
8699 // sp: stack pointer (restored as callee's sp after C call)
8700 // cp: current context (C callee-saved)
8701
Leon Clarke4515c472010-02-03 11:58:03 +00008702 // Result returned in r0 or r0+r1 by default.
8703
Steve Blocka7e24c12009-10-30 11:49:00 +00008704 // NOTE: Invocations of builtins may return failure objects
8705 // instead of a proper result. The builtin entry handles
8706 // this by performing a garbage collection and retrying the
8707 // builtin once.
8708
Steve Blocka7e24c12009-10-30 11:49:00 +00008709 // Enter the exit frame that transitions from JavaScript to C++.
Leon Clarke4515c472010-02-03 11:58:03 +00008710 __ EnterExitFrame(mode_);
Steve Blocka7e24c12009-10-30 11:49:00 +00008711
8712 // r4: number of arguments (C callee-saved)
8713 // r5: pointer to builtin function (C callee-saved)
8714 // r6: pointer to first argument (C callee-saved)
8715
8716 Label throw_normal_exception;
8717 Label throw_termination_exception;
8718 Label throw_out_of_memory_exception;
8719
8720 // Call into the runtime system.
8721 GenerateCore(masm,
8722 &throw_normal_exception,
8723 &throw_termination_exception,
8724 &throw_out_of_memory_exception,
Steve Blocka7e24c12009-10-30 11:49:00 +00008725 false,
Steve Block6ded16b2010-05-10 14:33:55 +01008726 false,
8727 -kPointerSize);
Steve Blocka7e24c12009-10-30 11:49:00 +00008728
8729 // Do space-specific GC and retry runtime call.
8730 GenerateCore(masm,
8731 &throw_normal_exception,
8732 &throw_termination_exception,
8733 &throw_out_of_memory_exception,
Steve Blocka7e24c12009-10-30 11:49:00 +00008734 true,
Steve Block6ded16b2010-05-10 14:33:55 +01008735 false,
8736 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00008737
8738 // Do full GC and retry runtime call one final time.
8739 Failure* failure = Failure::InternalError();
8740 __ mov(r0, Operand(reinterpret_cast<int32_t>(failure)));
8741 GenerateCore(masm,
8742 &throw_normal_exception,
8743 &throw_termination_exception,
8744 &throw_out_of_memory_exception,
Steve Blocka7e24c12009-10-30 11:49:00 +00008745 true,
Steve Block6ded16b2010-05-10 14:33:55 +01008746 true,
8747 kPointerSize);
Steve Blocka7e24c12009-10-30 11:49:00 +00008748
8749 __ bind(&throw_out_of_memory_exception);
8750 GenerateThrowUncatchable(masm, OUT_OF_MEMORY);
8751
8752 __ bind(&throw_termination_exception);
8753 GenerateThrowUncatchable(masm, TERMINATION);
8754
8755 __ bind(&throw_normal_exception);
8756 GenerateThrowTOS(masm);
8757}
8758
8759
8760void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
8761 // r0: code entry
8762 // r1: function
8763 // r2: receiver
8764 // r3: argc
8765 // [sp+0]: argv
8766
8767 Label invoke, exit;
8768
8769 // Called from C, so do not pop argc and args on exit (preserve sp)
8770 // No need to save register-passed args
8771 // Save callee-saved registers (incl. cp and fp), sp, and lr
8772 __ stm(db_w, sp, kCalleeSaved | lr.bit());
8773
8774 // Get address of argv, see stm above.
8775 // r0: code entry
8776 // r1: function
8777 // r2: receiver
8778 // r3: argc
Leon Clarke4515c472010-02-03 11:58:03 +00008779 __ ldr(r4, MemOperand(sp, (kNumCalleeSaved + 1) * kPointerSize)); // argv
Steve Blocka7e24c12009-10-30 11:49:00 +00008780
8781 // Push a frame with special values setup to mark it as an entry frame.
8782 // r0: code entry
8783 // r1: function
8784 // r2: receiver
8785 // r3: argc
8786 // r4: argv
8787 __ mov(r8, Operand(-1)); // Push a bad frame pointer to fail if it is used.
8788 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
8789 __ mov(r7, Operand(Smi::FromInt(marker)));
8790 __ mov(r6, Operand(Smi::FromInt(marker)));
8791 __ mov(r5, Operand(ExternalReference(Top::k_c_entry_fp_address)));
8792 __ ldr(r5, MemOperand(r5));
Steve Block6ded16b2010-05-10 14:33:55 +01008793 __ Push(r8, r7, r6, r5);
Steve Blocka7e24c12009-10-30 11:49:00 +00008794
8795 // Setup frame pointer for the frame to be pushed.
8796 __ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));
8797
8798 // Call a faked try-block that does the invoke.
8799 __ bl(&invoke);
8800
8801 // Caught exception: Store result (exception) in the pending
8802 // exception field in the JSEnv and return a failure sentinel.
8803 // Coming in here the fp will be invalid because the PushTryHandler below
8804 // sets it to 0 to signal the existence of the JSEntry frame.
8805 __ mov(ip, Operand(ExternalReference(Top::k_pending_exception_address)));
8806 __ str(r0, MemOperand(ip));
8807 __ mov(r0, Operand(reinterpret_cast<int32_t>(Failure::Exception())));
8808 __ b(&exit);
8809
8810 // Invoke: Link this frame into the handler chain.
8811 __ bind(&invoke);
8812 // Must preserve r0-r4, r5-r7 are available.
8813 __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
8814 // If an exception not caught by another handler occurs, this handler
8815 // returns control to the code after the bl(&invoke) above, which
8816 // restores all kCalleeSaved registers (including cp and fp) to their
8817 // saved values before returning a failure to C.
8818
8819 // Clear any pending exceptions.
8820 __ mov(ip, Operand(ExternalReference::the_hole_value_location()));
8821 __ ldr(r5, MemOperand(ip));
8822 __ mov(ip, Operand(ExternalReference(Top::k_pending_exception_address)));
8823 __ str(r5, MemOperand(ip));
8824
8825 // Invoke the function by calling through JS entry trampoline builtin.
8826 // Notice that we cannot store a reference to the trampoline code directly in
8827 // this stub, because runtime stubs are not traversed when doing GC.
8828
8829 // Expected registers by Builtins::JSEntryTrampoline
8830 // r0: code entry
8831 // r1: function
8832 // r2: receiver
8833 // r3: argc
8834 // r4: argv
8835 if (is_construct) {
8836 ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline);
8837 __ mov(ip, Operand(construct_entry));
8838 } else {
8839 ExternalReference entry(Builtins::JSEntryTrampoline);
8840 __ mov(ip, Operand(entry));
8841 }
8842 __ ldr(ip, MemOperand(ip)); // deref address
8843
8844 // Branch and link to JSEntryTrampoline. We don't use the double underscore
8845 // macro for the add instruction because we don't want the coverage tool
8846 // inserting instructions here after we read the pc.
8847 __ mov(lr, Operand(pc));
8848 masm->add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
8849
8850 // Unlink this frame from the handler chain. When reading the
8851 // address of the next handler, there is no need to use the address
8852 // displacement since the current stack pointer (sp) points directly
8853 // to the stack handler.
8854 __ ldr(r3, MemOperand(sp, StackHandlerConstants::kNextOffset));
8855 __ mov(ip, Operand(ExternalReference(Top::k_handler_address)));
8856 __ str(r3, MemOperand(ip));
8857 // No need to restore registers
8858 __ add(sp, sp, Operand(StackHandlerConstants::kSize));
8859
8860
8861 __ bind(&exit); // r0 holds result
8862 // Restore the top frame descriptors from the stack.
8863 __ pop(r3);
8864 __ mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
8865 __ str(r3, MemOperand(ip));
8866
8867 // Reset the stack to the callee saved registers.
8868 __ add(sp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));
8869
8870 // Restore callee-saved registers and return.
8871#ifdef DEBUG
8872 if (FLAG_debug_code) {
8873 __ mov(lr, Operand(pc));
8874 }
8875#endif
8876 __ ldm(ia_w, sp, kCalleeSaved | pc.bit());
8877}
8878
8879
8880// This stub performs an instanceof, calling the builtin function if
8881// necessary. Uses r1 for the object, r0 for the function that it may
8882// be an instance of (these are fetched from the stack).
8883void InstanceofStub::Generate(MacroAssembler* masm) {
8884 // Get the object - slow case for smis (we may need to throw an exception
8885 // depending on the rhs).
8886 Label slow, loop, is_instance, is_not_instance;
8887 __ ldr(r0, MemOperand(sp, 1 * kPointerSize));
8888 __ BranchOnSmi(r0, &slow);
8889
8890 // Check that the left hand is a JS object and put map in r3.
8891 __ CompareObjectType(r0, r3, r2, FIRST_JS_OBJECT_TYPE);
8892 __ b(lt, &slow);
8893 __ cmp(r2, Operand(LAST_JS_OBJECT_TYPE));
8894 __ b(gt, &slow);
8895
8896 // Get the prototype of the function (r4 is result, r2 is scratch).
Andrei Popescu402d9372010-02-26 13:31:12 +00008897 __ ldr(r1, MemOperand(sp, 0));
Kristian Monsen25f61362010-05-21 11:50:48 +01008898 // r1 is function, r3 is map.
8899
8900 // Look up the function and the map in the instanceof cache.
8901 Label miss;
8902 __ LoadRoot(ip, Heap::kInstanceofCacheFunctionRootIndex);
8903 __ cmp(r1, ip);
8904 __ b(ne, &miss);
8905 __ LoadRoot(ip, Heap::kInstanceofCacheMapRootIndex);
8906 __ cmp(r3, ip);
8907 __ b(ne, &miss);
8908 __ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
8909 __ pop();
8910 __ pop();
8911 __ mov(pc, Operand(lr));
8912
8913 __ bind(&miss);
Steve Blocka7e24c12009-10-30 11:49:00 +00008914 __ TryGetFunctionPrototype(r1, r4, r2, &slow);
8915
8916 // Check that the function prototype is a JS object.
8917 __ BranchOnSmi(r4, &slow);
8918 __ CompareObjectType(r4, r5, r5, FIRST_JS_OBJECT_TYPE);
8919 __ b(lt, &slow);
8920 __ cmp(r5, Operand(LAST_JS_OBJECT_TYPE));
8921 __ b(gt, &slow);
8922
Kristian Monsen25f61362010-05-21 11:50:48 +01008923 __ StoreRoot(r1, Heap::kInstanceofCacheFunctionRootIndex);
8924 __ StoreRoot(r3, Heap::kInstanceofCacheMapRootIndex);
8925
Steve Blocka7e24c12009-10-30 11:49:00 +00008926 // Register mapping: r3 is object map and r4 is function prototype.
8927 // Get prototype of object into r2.
8928 __ ldr(r2, FieldMemOperand(r3, Map::kPrototypeOffset));
8929
8930 // Loop through the prototype chain looking for the function prototype.
8931 __ bind(&loop);
8932 __ cmp(r2, Operand(r4));
8933 __ b(eq, &is_instance);
8934 __ LoadRoot(ip, Heap::kNullValueRootIndex);
8935 __ cmp(r2, ip);
8936 __ b(eq, &is_not_instance);
8937 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
8938 __ ldr(r2, FieldMemOperand(r2, Map::kPrototypeOffset));
8939 __ jmp(&loop);
8940
8941 __ bind(&is_instance);
8942 __ mov(r0, Operand(Smi::FromInt(0)));
Kristian Monsen25f61362010-05-21 11:50:48 +01008943 __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +00008944 __ pop();
8945 __ pop();
8946 __ mov(pc, Operand(lr)); // Return.
8947
8948 __ bind(&is_not_instance);
8949 __ mov(r0, Operand(Smi::FromInt(1)));
Kristian Monsen25f61362010-05-21 11:50:48 +01008950 __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +00008951 __ pop();
8952 __ pop();
8953 __ mov(pc, Operand(lr)); // Return.
8954
8955 // Slow-case. Tail call builtin.
8956 __ bind(&slow);
Steve Blocka7e24c12009-10-30 11:49:00 +00008957 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_JS);
8958}
8959
8960
Steve Blocka7e24c12009-10-30 11:49:00 +00008961void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
8962 // The displacement is the offset of the last parameter (if any)
8963 // relative to the frame pointer.
8964 static const int kDisplacement =
8965 StandardFrameConstants::kCallerSPOffset - kPointerSize;
8966
8967 // Check that the key is a smi.
8968 Label slow;
8969 __ BranchOnNotSmi(r1, &slow);
8970
8971 // Check if the calling frame is an arguments adaptor frame.
8972 Label adaptor;
8973 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
8974 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
8975 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
8976 __ b(eq, &adaptor);
8977
8978 // Check index against formal parameters count limit passed in
Steve Blockd0582a62009-12-15 09:54:21 +00008979 // through register r0. Use unsigned comparison to get negative
Steve Blocka7e24c12009-10-30 11:49:00 +00008980 // check for free.
8981 __ cmp(r1, r0);
8982 __ b(cs, &slow);
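  // (E.g. a key of Smi -1 has the raw value 0xfffffffe, which is above any
  // argument count in the unsigned comparison, so negative indices are
  // rejected here without a separate sign check.)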
8983
8984 // Read the argument from the stack and return it.
8985 __ sub(r3, r0, r1);
8986 __ add(r3, fp, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
8987 __ ldr(r0, MemOperand(r3, kDisplacement));
8988 __ Jump(lr);
8989
8990 // Arguments adaptor case: Check index against actual arguments
8991 // limit found in the arguments adaptor frame. Use unsigned
8992 // comparison to get negative check for free.
8993 __ bind(&adaptor);
8994 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
8995 __ cmp(r1, r0);
8996 __ b(cs, &slow);
8997
8998 // Read the argument from the adaptor frame and return it.
8999 __ sub(r3, r0, r1);
9000 __ add(r3, r2, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
9001 __ ldr(r0, MemOperand(r3, kDisplacement));
9002 __ Jump(lr);
9003
9004 // Slow-case: Handle non-smi or out-of-bounds access to arguments
9005 // by calling the runtime system.
9006 __ bind(&slow);
9007 __ push(r1);
Steve Block6ded16b2010-05-10 14:33:55 +01009008 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00009009}
9010
9011
9012void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
Andrei Popescu402d9372010-02-26 13:31:12 +00009013 // sp[0] : number of parameters
9014 // sp[4] : receiver displacement
9015 // sp[8] : function
9016
Steve Blocka7e24c12009-10-30 11:49:00 +00009017 // Check if the calling frame is an arguments adaptor frame.
Andrei Popescu402d9372010-02-26 13:31:12 +00009018 Label adaptor_frame, try_allocate, runtime;
Steve Blocka7e24c12009-10-30 11:49:00 +00009019 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
9020 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
9021 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
Andrei Popescu402d9372010-02-26 13:31:12 +00009022 __ b(eq, &adaptor_frame);
9023
9024 // Get the length from the frame.
9025 __ ldr(r1, MemOperand(sp, 0));
9026 __ b(&try_allocate);
Steve Blocka7e24c12009-10-30 11:49:00 +00009027
9028 // Patch the arguments.length and the parameters pointer.
Andrei Popescu402d9372010-02-26 13:31:12 +00009029 __ bind(&adaptor_frame);
9030 __ ldr(r1, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
9031 __ str(r1, MemOperand(sp, 0));
9032 __ add(r3, r2, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
Steve Blocka7e24c12009-10-30 11:49:00 +00009033 __ add(r3, r3, Operand(StandardFrameConstants::kCallerSPOffset));
9034 __ str(r3, MemOperand(sp, 1 * kPointerSize));
9035
Andrei Popescu402d9372010-02-26 13:31:12 +00009036 // Try the new space allocation. Start out with computing the size
Kristian Monsen25f61362010-05-21 11:50:48 +01009037 // of the arguments object and the elements array in words.
Andrei Popescu402d9372010-02-26 13:31:12 +00009038 Label add_arguments_object;
9039 __ bind(&try_allocate);
9040 __ cmp(r1, Operand(0));
9041 __ b(eq, &add_arguments_object);
9042 __ mov(r1, Operand(r1, LSR, kSmiTagSize));
9043 __ add(r1, r1, Operand(FixedArray::kHeaderSize / kPointerSize));
9044 __ bind(&add_arguments_object);
9045 __ add(r1, r1, Operand(Heap::kArgumentsObjectSize / kPointerSize));
9046
9047 // Do the allocation of both objects in one go.
Kristian Monsen25f61362010-05-21 11:50:48 +01009048 __ AllocateInNewSpace(
9049 r1,
9050 r0,
9051 r2,
9052 r3,
9053 &runtime,
9054 static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));
Andrei Popescu402d9372010-02-26 13:31:12 +00009055
9056 // Get the arguments boilerplate from the current (global) context.
9057 int offset = Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX);
9058 __ ldr(r4, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
9059 __ ldr(r4, FieldMemOperand(r4, GlobalObject::kGlobalContextOffset));
9060 __ ldr(r4, MemOperand(r4, offset));
9061
9062 // Copy the JS object part.
9063 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
9064 __ ldr(r3, FieldMemOperand(r4, i));
9065 __ str(r3, FieldMemOperand(r0, i));
9066 }
9067
9068 // Setup the callee in-object property.
9069 ASSERT(Heap::arguments_callee_index == 0);
9070 __ ldr(r3, MemOperand(sp, 2 * kPointerSize));
9071 __ str(r3, FieldMemOperand(r0, JSObject::kHeaderSize));
9072
9073 // Get the length (smi tagged) and set that as an in-object property too.
9074 ASSERT(Heap::arguments_length_index == 1);
9075 __ ldr(r1, MemOperand(sp, 0 * kPointerSize));
9076 __ str(r1, FieldMemOperand(r0, JSObject::kHeaderSize + kPointerSize));
9077
9078 // If there are no actual arguments, we're done.
9079 Label done;
9080 __ cmp(r1, Operand(0));
9081 __ b(eq, &done);
9082
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009083 // Get the parameters pointer from the stack.
Andrei Popescu402d9372010-02-26 13:31:12 +00009084 __ ldr(r2, MemOperand(sp, 1 * kPointerSize));
Andrei Popescu402d9372010-02-26 13:31:12 +00009085
9086 // Setup the elements pointer in the allocated arguments object and
9087 // initialize the header in the elements fixed array.
9088 __ add(r4, r0, Operand(Heap::kArgumentsObjectSize));
9089 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
9090 __ LoadRoot(r3, Heap::kFixedArrayMapRootIndex);
9091 __ str(r3, FieldMemOperand(r4, FixedArray::kMapOffset));
9092 __ str(r1, FieldMemOperand(r4, FixedArray::kLengthOffset));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009093 __ mov(r1, Operand(r1, LSR, kSmiTagSize)); // Untag the length for the loop.
Andrei Popescu402d9372010-02-26 13:31:12 +00009094
9095 // Copy the fixed array slots.
9096 Label loop;
9097 // Setup r4 to point to the first array slot.
9098 __ add(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
9099 __ bind(&loop);
9100 // Pre-decrement r2 with kPointerSize on each iteration.
9101 // Pre-decrement in order to skip receiver.
9102 __ ldr(r3, MemOperand(r2, kPointerSize, NegPreIndex));
9103 // Post-increment r4 with kPointerSize on each iteration.
9104 __ str(r3, MemOperand(r4, kPointerSize, PostIndex));
9105 __ sub(r1, r1, Operand(1));
9106 __ cmp(r1, Operand(0));
9107 __ b(ne, &loop);
9108
9109 // Return and remove the on-stack parameters.
9110 __ bind(&done);
9111 __ add(sp, sp, Operand(3 * kPointerSize));
9112 __ Ret();
9113
Steve Blocka7e24c12009-10-30 11:49:00 +00009114 // Do the runtime call to allocate the arguments object.
9115 __ bind(&runtime);
Steve Block6ded16b2010-05-10 14:33:55 +01009116 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
9117}
9118
9119
9120void RegExpExecStub::Generate(MacroAssembler* masm) {
9121 // Just jump directly to runtime if native RegExp is not selected at compile
9122  // time or if the regexp entry in generated code is turned off by the
9123  // runtime switch or at compilation.
Kristian Monsen25f61362010-05-21 11:50:48 +01009124#ifdef V8_INTERPRETED_REGEXP
Steve Block6ded16b2010-05-10 14:33:55 +01009125 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
Kristian Monsen25f61362010-05-21 11:50:48 +01009126#else // V8_INTERPRETED_REGEXP
Steve Block6ded16b2010-05-10 14:33:55 +01009127 if (!FLAG_regexp_entry_native) {
9128 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
9129 return;
9130 }
9131
9132 // Stack frame on entry.
9133 // sp[0]: last_match_info (expected JSArray)
9134 // sp[4]: previous index
9135 // sp[8]: subject string
9136 // sp[12]: JSRegExp object
9137
9138 static const int kLastMatchInfoOffset = 0 * kPointerSize;
9139 static const int kPreviousIndexOffset = 1 * kPointerSize;
9140 static const int kSubjectOffset = 2 * kPointerSize;
9141 static const int kJSRegExpOffset = 3 * kPointerSize;
9142
9143 Label runtime, invoke_regexp;
9144
9145 // Allocation of registers for this function. These are in callee save
9146 // registers and will be preserved by the call to the native RegExp code, as
9147 // this code is called using the normal C calling convention. When calling
9148 // directly from generated code the native RegExp code will not do a GC and
9149 // therefore the content of these registers are safe to use after the call.
9150 Register subject = r4;
9151 Register regexp_data = r5;
9152 Register last_match_info_elements = r6;
9153
9154 // Ensure that a RegExp stack is allocated.
9155 ExternalReference address_of_regexp_stack_memory_address =
9156 ExternalReference::address_of_regexp_stack_memory_address();
9157 ExternalReference address_of_regexp_stack_memory_size =
9158 ExternalReference::address_of_regexp_stack_memory_size();
9159 __ mov(r0, Operand(address_of_regexp_stack_memory_size));
9160 __ ldr(r0, MemOperand(r0, 0));
9161 __ tst(r0, Operand(r0));
9162 __ b(eq, &runtime);
9163
9164 // Check that the first argument is a JSRegExp object.
9165 __ ldr(r0, MemOperand(sp, kJSRegExpOffset));
9166 ASSERT_EQ(0, kSmiTag);
9167 __ tst(r0, Operand(kSmiTagMask));
9168 __ b(eq, &runtime);
9169 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
9170 __ b(ne, &runtime);
9171
9172 // Check that the RegExp has been compiled (data contains a fixed array).
9173 __ ldr(regexp_data, FieldMemOperand(r0, JSRegExp::kDataOffset));
9174 if (FLAG_debug_code) {
9175 __ tst(regexp_data, Operand(kSmiTagMask));
9176 __ Check(nz, "Unexpected type for RegExp data, FixedArray expected");
9177 __ CompareObjectType(regexp_data, r0, r0, FIXED_ARRAY_TYPE);
9178 __ Check(eq, "Unexpected type for RegExp data, FixedArray expected");
9179 }
9180
9181 // regexp_data: RegExp data (FixedArray)
9182 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
9183 __ ldr(r0, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset));
9184 __ cmp(r0, Operand(Smi::FromInt(JSRegExp::IRREGEXP)));
9185 __ b(ne, &runtime);
9186
9187 // regexp_data: RegExp data (FixedArray)
9188 // Check that the number of captures fits in the static offsets vector buffer.
9189 __ ldr(r2,
9190 FieldMemOperand(regexp_data, JSRegExp::kIrregexpCaptureCountOffset));
9191 // Calculate number of capture registers (number_of_captures + 1) * 2. This
9192 // uses the assumption that smis are 2 * their untagged value.
9193 ASSERT_EQ(0, kSmiTag);
9194 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
9195 __ add(r2, r2, Operand(2)); // r2 was a smi.
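  // Worked example of the arithmetic above (informal, not generated code): for
  // a regexp with two capture groups, r2 initially holds the smi 2 * 2 = 4,
  // and the required register count is (2 + 1) * 2 = 6, which is exactly r2 + 2.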
9196 // Check that the static offsets vector buffer is large enough.
9197 __ cmp(r2, Operand(OffsetsVector::kStaticOffsetsVectorSize));
9198 __ b(hi, &runtime);
9199
9200 // r2: Number of capture registers
9201 // regexp_data: RegExp data (FixedArray)
9202 // Check that the second argument is a string.
9203 __ ldr(subject, MemOperand(sp, kSubjectOffset));
9204 __ tst(subject, Operand(kSmiTagMask));
9205 __ b(eq, &runtime);
9206 Condition is_string = masm->IsObjectStringType(subject, r0);
9207 __ b(NegateCondition(is_string), &runtime);
9208 // Get the length of the string to r3.
9209 __ ldr(r3, FieldMemOperand(subject, String::kLengthOffset));
9210
9211 // r2: Number of capture registers
9212 // r3: Length of subject string as a smi
9213 // subject: Subject string
9214 // regexp_data: RegExp data (FixedArray)
9215 // Check that the third argument is a positive smi less than the subject
9216 // string length. A negative value will be greater (unsigned comparison).
9217 __ ldr(r0, MemOperand(sp, kPreviousIndexOffset));
9218 __ tst(r0, Operand(kSmiTagMask));
Kristian Monsen25f61362010-05-21 11:50:48 +01009219 __ b(ne, &runtime);
Steve Block6ded16b2010-05-10 14:33:55 +01009220 __ cmp(r3, Operand(r0));
Kristian Monsen25f61362010-05-21 11:50:48 +01009221 __ b(ls, &runtime);
Steve Block6ded16b2010-05-10 14:33:55 +01009222
9223 // r2: Number of capture registers
9224 // subject: Subject string
9225 // regexp_data: RegExp data (FixedArray)
9226 // Check that the fourth object is a JSArray object.
9227 __ ldr(r0, MemOperand(sp, kLastMatchInfoOffset));
9228 __ tst(r0, Operand(kSmiTagMask));
9229 __ b(eq, &runtime);
9230 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
9231 __ b(ne, &runtime);
9232 // Check that the JSArray is in fast case.
9233 __ ldr(last_match_info_elements,
9234 FieldMemOperand(r0, JSArray::kElementsOffset));
9235 __ ldr(r0, FieldMemOperand(last_match_info_elements, HeapObject::kMapOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01009236 __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01009237 __ cmp(r0, ip);
9238 __ b(ne, &runtime);
9239 // Check that the last match info has space for the capture registers and the
9240 // additional information.
9241 __ ldr(r0,
9242 FieldMemOperand(last_match_info_elements, FixedArray::kLengthOffset));
9243 __ add(r2, r2, Operand(RegExpImpl::kLastMatchOverhead));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009244 __ cmp(r2, Operand(r0, ASR, kSmiTagSize));
Steve Block6ded16b2010-05-10 14:33:55 +01009245 __ b(gt, &runtime);
9246
9247 // subject: Subject string
9248 // regexp_data: RegExp data (FixedArray)
9249 // Check the representation and encoding of the subject string.
9250 Label seq_string;
9251 const int kStringRepresentationEncodingMask =
9252 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
9253 __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset));
9254 __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
9255 __ and_(r1, r0, Operand(kStringRepresentationEncodingMask));
9256 // First check for sequential string.
9257 ASSERT_EQ(0, kStringTag);
9258 ASSERT_EQ(0, kSeqStringTag);
9259 __ tst(r1, Operand(kIsNotStringMask | kStringRepresentationMask));
9260 __ b(eq, &seq_string);
9261
9262 // subject: Subject string
9263 // regexp_data: RegExp data (FixedArray)
9264 // Check for flat cons string.
9265 // A flat cons string is a cons string where the second part is the empty
9266 // string. In that case the subject string is just the first part of the cons
9267 // string. Also in this case the first part of the cons string is known to be
9268 // a sequential string or an external string.
9269 __ and_(r0, r0, Operand(kStringRepresentationMask));
9270 __ cmp(r0, Operand(kConsStringTag));
9271 __ b(ne, &runtime);
9272 __ ldr(r0, FieldMemOperand(subject, ConsString::kSecondOffset));
9273 __ LoadRoot(r1, Heap::kEmptyStringRootIndex);
9274 __ cmp(r0, r1);
9275 __ b(ne, &runtime);
9276 __ ldr(subject, FieldMemOperand(subject, ConsString::kFirstOffset));
9277 __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset));
9278 __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
9279 ASSERT_EQ(0, kSeqStringTag);
9280 __ tst(r0, Operand(kStringRepresentationMask));
9281 __ b(nz, &runtime);
9282 __ and_(r1, r0, Operand(kStringRepresentationEncodingMask));
9283
9284 __ bind(&seq_string);
9285 // r1: subject string type & kStringRepresentationEncodingMask
9286 // subject: Subject string
9287 // regexp_data: RegExp data (FixedArray)
9288 // Check that the irregexp code has been generated for an ascii string. If
9289 // it has, the field contains a code object; otherwise it contains the hole.
9290#ifdef DEBUG
9291 const int kSeqAsciiString = kStringTag | kSeqStringTag | kAsciiStringTag;
9292 const int kSeqTwoByteString = kStringTag | kSeqStringTag | kTwoByteStringTag;
9293 CHECK_EQ(4, kSeqAsciiString);
9294 CHECK_EQ(0, kSeqTwoByteString);
9295#endif
9296 // Find the code object based on the assumptions above.
9297 __ mov(r3, Operand(r1, ASR, 2), SetCC);
9298 __ ldr(r7, FieldMemOperand(regexp_data, JSRegExp::kDataAsciiCodeOffset), ne);
9299 __ ldr(r7, FieldMemOperand(regexp_data, JSRegExp::kDataUC16CodeOffset), eq);
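  // Informal note on the selection above: r1 holds the instance type masked
  // with kStringRepresentationEncodingMask, which the debug-only CHECKs above
  // pin down as 4 for a sequential ASCII string and 0 for a sequential
  // two-byte string. Shifting right by 2 with SetCC therefore leaves 'ne' for
  // ASCII and 'eq' for two-byte, so exactly one of the two conditional loads
  // of r7 is performed.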
9300
9301 // Check that the irregexp code has been generated for the actual string
9302 // encoding. If it has, the field contains a code object; otherwise it contains
9303 // the hole.
9304 __ CompareObjectType(r7, r0, r0, CODE_TYPE);
9305 __ b(ne, &runtime);
9306
9307 // r3: encoding of subject string (1 if ascii, 0 if two_byte);
9308 // r7: code
9309 // subject: Subject string
9310 // regexp_data: RegExp data (FixedArray)
9311 // Load used arguments before starting to push arguments for call to native
9312 // RegExp code to avoid handling changing stack height.
9313 __ ldr(r1, MemOperand(sp, kPreviousIndexOffset));
9314 __ mov(r1, Operand(r1, ASR, kSmiTagSize));
9315
9316 // r1: previous index
9317 // r3: encoding of subject string (1 if ascii, 0 if two_byte);
9318 // r7: code
9319 // subject: Subject string
9320 // regexp_data: RegExp data (FixedArray)
9321 // All checks done. Now push arguments for native regexp code.
9322 __ IncrementCounter(&Counters::regexp_entry_native, 1, r0, r2);
9323
9324 static const int kRegExpExecuteArguments = 7;
9325 __ push(lr);
9326 __ PrepareCallCFunction(kRegExpExecuteArguments, r0);
9327
9328 // Argument 7 (sp[8]): Indicate that this is a direct call from JavaScript.
9329 __ mov(r0, Operand(1));
9330 __ str(r0, MemOperand(sp, 2 * kPointerSize));
9331
9332 // Argument 6 (sp[4]): Start (high end) of backtracking stack memory area.
9333 __ mov(r0, Operand(address_of_regexp_stack_memory_address));
9334 __ ldr(r0, MemOperand(r0, 0));
9335 __ mov(r2, Operand(address_of_regexp_stack_memory_size));
9336 __ ldr(r2, MemOperand(r2, 0));
9337 __ add(r0, r0, Operand(r2));
9338 __ str(r0, MemOperand(sp, 1 * kPointerSize));
9339
9340 // Argument 5 (sp[0]): static offsets vector buffer.
9341 __ mov(r0, Operand(ExternalReference::address_of_static_offsets_vector()));
9342 __ str(r0, MemOperand(sp, 0 * kPointerSize));
9343
9344 // For arguments 4 and 3 get string length, calculate start of string data and
9345 // calculate the shift of the index (0 for ASCII and 1 for two byte).
9346 __ ldr(r0, FieldMemOperand(subject, String::kLengthOffset));
9347 __ mov(r0, Operand(r0, ASR, kSmiTagSize));
9348 ASSERT_EQ(SeqAsciiString::kHeaderSize, SeqTwoByteString::kHeaderSize);
9349 __ add(r9, subject, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
9350 __ eor(r3, r3, Operand(1));
9351 // Argument 4 (r3): End of string data
9352 // Argument 3 (r2): Start of string data
9353 __ add(r2, r9, Operand(r1, LSL, r3));
9354 __ add(r3, r9, Operand(r0, LSL, r3));
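  // Informal note on the address arithmetic above: r9 points at the first
  // character of the sequential subject string, and after the eor r3 holds the
  // log2 of the character size (0 for ASCII, 1 for two-byte). The two adds
  // therefore turn the previous index (r1) and the string length (r0) into
  // byte addresses relative to r9 for the native RegExp code.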
9355
9356 // Argument 2 (r1): Previous index.
9357 // Already there
9358
9359 // Argument 1 (r0): Subject string.
9360 __ mov(r0, subject);
9361
9362 // Locate the code entry and call it.
9363 __ add(r7, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
9364 __ CallCFunction(r7, kRegExpExecuteArguments);
9365 __ pop(lr);
9366
9367 // r0: result
9368 // subject: subject string (callee saved)
9369 // regexp_data: RegExp data (callee saved)
9370 // last_match_info_elements: Last match info elements (callee saved)
9371
9372 // Check the result.
9373 Label success;
9374 __ cmp(r0, Operand(NativeRegExpMacroAssembler::SUCCESS));
9375 __ b(eq, &success);
9376 Label failure;
9377 __ cmp(r0, Operand(NativeRegExpMacroAssembler::FAILURE));
9378 __ b(eq, &failure);
9379 __ cmp(r0, Operand(NativeRegExpMacroAssembler::EXCEPTION));
9380 // If not exception it can only be retry. Handle that in the runtime system.
9381 __ b(ne, &runtime);
9382 // Result must now be exception. If there is no pending exception already, a
9383 // stack overflow (on the backtrack stack) was detected in RegExp code, but
9384 // the exception has not been created yet. Handle that in the runtime system.
9385 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
9386 __ mov(r0, Operand(ExternalReference::the_hole_value_location()));
9387 __ ldr(r0, MemOperand(r0, 0));
9388 __ mov(r1, Operand(ExternalReference(Top::k_pending_exception_address)));
9389 __ ldr(r1, MemOperand(r1, 0));
9390 __ cmp(r0, r1);
9391 __ b(eq, &runtime);
9392 __ bind(&failure);
9393 // For failure and exception return null.
9394 __ mov(r0, Operand(Factory::null_value()));
9395 __ add(sp, sp, Operand(4 * kPointerSize));
9396 __ Ret();
9397
9398 // Process the result from the native regexp code.
9399 __ bind(&success);
9400 __ ldr(r1,
9401 FieldMemOperand(regexp_data, JSRegExp::kIrregexpCaptureCountOffset));
9402 // Calculate number of capture registers (number_of_captures + 1) * 2.
9403 ASSERT_EQ(0, kSmiTag);
9404 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
9405 __ add(r1, r1, Operand(2)); // r1 was a smi.
9406
9407 // r1: number of capture registers
9408 // r4: subject string
9409 // Store the capture count.
9410 __ mov(r2, Operand(r1, LSL, kSmiTagSize + kSmiShiftSize)); // To smi.
9411 __ str(r2, FieldMemOperand(last_match_info_elements,
9412 RegExpImpl::kLastCaptureCountOffset));
9413 // Store last subject and last input.
9414 __ mov(r3, last_match_info_elements); // Moved up to reduce latency.
9415 __ mov(r2, Operand(RegExpImpl::kLastSubjectOffset)); // Ditto.
9416 __ str(subject,
9417 FieldMemOperand(last_match_info_elements,
9418 RegExpImpl::kLastSubjectOffset));
9419 __ RecordWrite(r3, r2, r7);
9420 __ str(subject,
9421 FieldMemOperand(last_match_info_elements,
9422 RegExpImpl::kLastInputOffset));
9423 __ mov(r3, last_match_info_elements);
9424 __ mov(r2, Operand(RegExpImpl::kLastInputOffset));
9425 __ RecordWrite(r3, r2, r7);
9426
9427 // Get the static offsets vector filled by the native regexp code.
9428 ExternalReference address_of_static_offsets_vector =
9429 ExternalReference::address_of_static_offsets_vector();
9430 __ mov(r2, Operand(address_of_static_offsets_vector));
9431
9432 // r1: number of capture registers
9433 // r2: offsets vector
9434 Label next_capture, done;
9435 // Capture register counter starts from number of capture registers and
9436 // counts down until wrapping after zero.
9437 __ add(r0,
9438 last_match_info_elements,
9439 Operand(RegExpImpl::kFirstCaptureOffset - kHeapObjectTag));
9440 __ bind(&next_capture);
9441 __ sub(r1, r1, Operand(1), SetCC);
9442 __ b(mi, &done);
9443 // Read the value from the static offsets vector buffer.
9444 __ ldr(r3, MemOperand(r2, kPointerSize, PostIndex));
9445 // Store the smi value in the last match info.
9446 __ mov(r3, Operand(r3, LSL, kSmiTagSize));
9447 __ str(r3, MemOperand(r0, kPointerSize, PostIndex));
9448 __ jmp(&next_capture);
9449 __ bind(&done);
9450
9451 // Return last match info.
9452 __ ldr(r0, MemOperand(sp, kLastMatchInfoOffset));
9453 __ add(sp, sp, Operand(4 * kPointerSize));
9454 __ Ret();
9455
9456 // Do the runtime call to execute the regexp.
9457 __ bind(&runtime);
9458 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
Kristian Monsen25f61362010-05-21 11:50:48 +01009459#endif // V8_INTERPRETED_REGEXP
Steve Blocka7e24c12009-10-30 11:49:00 +00009460}
9461
9462
9463void CallFunctionStub::Generate(MacroAssembler* masm) {
9464 Label slow;
Leon Clarkee46be812010-01-19 14:06:41 +00009465
9466 // If the receiver might be a value (string, number or boolean), check for this
9467 // and box it if it is.
9468 if (ReceiverMightBeValue()) {
9469 // Get the receiver from the stack.
9470 // function, receiver [, arguments]
9471 Label receiver_is_value, receiver_is_js_object;
9472 __ ldr(r1, MemOperand(sp, argc_ * kPointerSize));
9473
9474 // Check if receiver is a smi (which is a number value).
9475 __ BranchOnSmi(r1, &receiver_is_value);
9476
9477 // Check if the receiver is a valid JS object.
9478 __ CompareObjectType(r1, r2, r2, FIRST_JS_OBJECT_TYPE);
9479 __ b(ge, &receiver_is_js_object);
9480
9481 // Call the runtime to box the value.
9482 __ bind(&receiver_is_value);
9483 __ EnterInternalFrame();
9484 __ push(r1);
9485 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS);
9486 __ LeaveInternalFrame();
9487 __ str(r0, MemOperand(sp, argc_ * kPointerSize));
9488
9489 __ bind(&receiver_is_js_object);
9490 }
9491
Steve Blocka7e24c12009-10-30 11:49:00 +00009492 // Get the function to call from the stack.
9493 // function, receiver [, arguments]
9494 __ ldr(r1, MemOperand(sp, (argc_ + 1) * kPointerSize));
9495
9496 // Check that the function is really a JavaScript function.
9497 // r1: pushed function (to be verified)
9498 __ BranchOnSmi(r1, &slow);
9499 // Get the map of the function object.
9500 __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
9501 __ b(ne, &slow);
9502
9503 // Fast-case: Invoke the function now.
9504 // r1: pushed function
9505 ParameterCount actual(argc_);
9506 __ InvokeFunction(r1, actual, JUMP_FUNCTION);
9507
9508 // Slow-case: Non-function called.
9509 __ bind(&slow);
Andrei Popescu402d9372010-02-26 13:31:12 +00009510 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
9511 // of the original receiver from the call site).
9512 __ str(r1, MemOperand(sp, argc_ * kPointerSize));
Steve Blocka7e24c12009-10-30 11:49:00 +00009513 __ mov(r0, Operand(argc_)); // Set up the number of arguments.
9514 __ mov(r2, Operand(0));
9515 __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION);
9516 __ Jump(Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline)),
9517 RelocInfo::CODE_TARGET);
9518}
9519
9520
Steve Block6ded16b2010-05-10 14:33:55 +01009521// Unfortunately you have to run without snapshots to see most of these
9522// names in the profile since most compare stubs end up in the snapshot.
Leon Clarkee46be812010-01-19 14:06:41 +00009523const char* CompareStub::GetName() {
Steve Block6ded16b2010-05-10 14:33:55 +01009524 if (name_ != NULL) return name_;
9525 const int kMaxNameLength = 100;
9526 name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
9527 if (name_ == NULL) return "OOM";
9528
9529 const char* cc_name;
Leon Clarkee46be812010-01-19 14:06:41 +00009530 switch (cc_) {
Steve Block6ded16b2010-05-10 14:33:55 +01009531 case lt: cc_name = "LT"; break;
9532 case gt: cc_name = "GT"; break;
9533 case le: cc_name = "LE"; break;
9534 case ge: cc_name = "GE"; break;
9535 case eq: cc_name = "EQ"; break;
9536 case ne: cc_name = "NE"; break;
9537 default: cc_name = "UnknownCondition"; break;
Leon Clarkee46be812010-01-19 14:06:41 +00009538 }
Steve Block6ded16b2010-05-10 14:33:55 +01009539
9540 const char* strict_name = "";
9541 if (strict_ && (cc_ == eq || cc_ == ne)) {
9542 strict_name = "_STRICT";
9543 }
9544
9545 const char* never_nan_nan_name = "";
9546 if (never_nan_nan_ && (cc_ == eq || cc_ == ne)) {
9547 never_nan_nan_name = "_NO_NAN";
9548 }
9549
9550 const char* include_number_compare_name = "";
9551 if (!include_number_compare_) {
9552 include_number_compare_name = "_NO_NUMBER";
9553 }
9554
9555 OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
9556 "CompareStub_%s%s%s%s",
9557 cc_name,
9558 strict_name,
9559 never_nan_nan_name,
9560 include_number_compare_name);
9561 return name_;
Leon Clarkee46be812010-01-19 14:06:41 +00009562}
9563
9564
Steve Blocka7e24c12009-10-30 11:49:00 +00009565int CompareStub::MinorKey() {
Steve Block6ded16b2010-05-10 14:33:55 +01009566 // Encode the parameters in a unique 16 bit value. To avoid duplicate
9567 // stubs, the never NaN NaN condition is only taken into account if the
9568 // condition is equals.
9569 ASSERT((static_cast<unsigned>(cc_) >> 28) < (1 << 13));
9570 return ConditionField::encode(static_cast<unsigned>(cc_) >> 28)
9571 | StrictField::encode(strict_)
9572 | NeverNanNanField::encode(cc_ == eq ? never_nan_nan_ : false)
9573 | IncludeNumberCompareField::encode(include_number_compare_);
Steve Blocka7e24c12009-10-30 11:49:00 +00009574}
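// Note (informal): on ARM the Condition enum values are already shifted into
// bits 28-31 of an instruction word, so the cc_ >> 28 used above is just the
// 4 bit condition code, which fits comfortably in ConditionField.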
9575
9576
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009577// StringCharCodeAtGenerator
9578
9579void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
9580 Label flat_string;
Steve Block6ded16b2010-05-10 14:33:55 +01009581 Label ascii_string;
9582 Label got_char_code;
9583
9584 // If the receiver is a smi trigger the non-string case.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009585 __ BranchOnSmi(object_, receiver_not_string_);
Steve Block6ded16b2010-05-10 14:33:55 +01009586
9587 // Fetch the instance type of the receiver into result register.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009588 __ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
9589 __ ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01009590 // If the receiver is not a string trigger the non-string case.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009591 __ tst(result_, Operand(kIsNotStringMask));
9592 __ b(ne, receiver_not_string_);
Steve Block6ded16b2010-05-10 14:33:55 +01009593
9594 // If the index is non-smi trigger the non-smi case.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009595 __ BranchOnNotSmi(index_, &index_not_smi_);
9596
9597 // Put smi-tagged index into scratch register.
9598 __ mov(scratch_, index_);
9599 __ bind(&got_smi_index_);
Steve Block6ded16b2010-05-10 14:33:55 +01009600
9601 // Check for index out of range.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009602 __ ldr(ip, FieldMemOperand(object_, String::kLengthOffset));
9603 __ cmp(ip, Operand(scratch_));
9604 __ b(ls, index_out_of_range_);
Steve Block6ded16b2010-05-10 14:33:55 +01009605
9606 // We need special handling for non-flat strings.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009607 ASSERT(kSeqStringTag == 0);
9608 __ tst(result_, Operand(kStringRepresentationMask));
9609 __ b(eq, &flat_string);
Steve Block6ded16b2010-05-10 14:33:55 +01009610
9611 // Handle non-flat strings.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009612 __ tst(result_, Operand(kIsConsStringMask));
9613 __ b(eq, &call_runtime_);
Steve Block6ded16b2010-05-10 14:33:55 +01009614
9615 // ConsString.
9616 // Check whether the right hand side is the empty string (i.e. if
9617 // this is really a flat string in a cons string). If that is not
9618 // the case we would rather go to the runtime system now to flatten
9619 // the string.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009620 __ ldr(result_, FieldMemOperand(object_, ConsString::kSecondOffset));
9621 __ LoadRoot(ip, Heap::kEmptyStringRootIndex);
9622 __ cmp(result_, Operand(ip));
9623 __ b(ne, &call_runtime_);
Steve Block6ded16b2010-05-10 14:33:55 +01009624 // Get the first of the two strings and load its instance type.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009625 __ ldr(object_, FieldMemOperand(object_, ConsString::kFirstOffset));
9626 __ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
9627 __ ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
9628 // If the first cons component is also non-flat, then go to runtime.
9629 ASSERT(kSeqStringTag == 0);
9630 __ tst(result_, Operand(kStringRepresentationMask));
9631 __ b(nz, &call_runtime_);
9632
9633 // Check for 1-byte or 2-byte string.
9634 __ bind(&flat_string);
9635 ASSERT(kAsciiStringTag != 0);
9636 __ tst(result_, Operand(kStringEncodingMask));
9637 __ b(nz, &ascii_string);
9638
9639 // 2-byte string.
9640 // Load the 2-byte character code into the result register. We can
9641 // add without shifting since the smi tag size is the log2 of the
9642 // number of bytes in a two-byte character.
9643 ASSERT(kSmiTag == 0 && kSmiTagSize == 1 && kSmiShiftSize == 0);
9644 __ add(scratch_, object_, Operand(scratch_));
9645 __ ldrh(result_, FieldMemOperand(scratch_, SeqTwoByteString::kHeaderSize));
9646 __ jmp(&got_char_code);
Steve Block6ded16b2010-05-10 14:33:55 +01009647
9648 // ASCII string.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009649 // Load the byte into the result register.
Steve Block6ded16b2010-05-10 14:33:55 +01009650 __ bind(&ascii_string);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009651 __ add(scratch_, object_, Operand(scratch_, LSR, kSmiTagSize));
9652 __ ldrb(result_, FieldMemOperand(scratch_, SeqAsciiString::kHeaderSize));
Steve Block6ded16b2010-05-10 14:33:55 +01009653
9654 __ bind(&got_char_code);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009655 __ mov(result_, Operand(result_, LSL, kSmiTagSize));
9656 __ bind(&exit_);
Steve Block6ded16b2010-05-10 14:33:55 +01009657}
9658
9659
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009660void StringCharCodeAtGenerator::GenerateSlow(
9661 MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
9662 __ Abort("Unexpected fallthrough to CharCodeAt slow case");
Steve Block6ded16b2010-05-10 14:33:55 +01009663
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009664 // Index is not a smi.
9665 __ bind(&index_not_smi_);
9666 // If index is a heap number, try converting it to an integer.
9667 __ CheckMap(index_,
9668 scratch_,
9669 Heap::kHeapNumberMapRootIndex,
9670 index_not_number_,
9671 true);
9672 call_helper.BeforeCall(masm);
9673 __ Push(object_, index_);
9674 __ push(index_); // Consumed by runtime conversion function.
9675 if (index_flags_ == STRING_INDEX_IS_NUMBER) {
9676 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
9677 } else {
9678 ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
9679 // NumberToSmi discards numbers that are not exact integers.
9680 __ CallRuntime(Runtime::kNumberToSmi, 1);
9681 }
9682 if (!scratch_.is(r0)) {
9683 // Save the conversion result before the pop instructions below
9684 // have a chance to overwrite it.
9685 __ mov(scratch_, r0);
9686 }
9687 __ pop(index_);
9688 __ pop(object_);
9689 // Reload the instance type.
9690 __ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
9691 __ ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
9692 call_helper.AfterCall(masm);
9693 // If index is still not a smi, it must be out of range.
9694 __ BranchOnNotSmi(scratch_, index_out_of_range_);
9695 // Otherwise, return to the fast path.
9696 __ jmp(&got_smi_index_);
Steve Block6ded16b2010-05-10 14:33:55 +01009697
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009698 // Call runtime. We get here when the receiver is a string and the
9699 // index is a number, but the code of getting the actual character
9700 // is too complex (e.g., when the string needs to be flattened).
9701 __ bind(&call_runtime_);
9702 call_helper.BeforeCall(masm);
9703 __ Push(object_, index_);
9704 __ CallRuntime(Runtime::kStringCharCodeAt, 2);
9705 if (!result_.is(r0)) {
9706 __ mov(result_, r0);
9707 }
9708 call_helper.AfterCall(masm);
9709 __ jmp(&exit_);
9710
9711 __ Abort("Unexpected fallthrough from CharCodeAt slow case");
9712}
9713
9714
9715// -------------------------------------------------------------------------
9716// StringCharFromCodeGenerator
9717
9718void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
Steve Block6ded16b2010-05-10 14:33:55 +01009719 // Fast case of Heap::LookupSingleCharacterStringFromCode.
9720 ASSERT(kSmiTag == 0);
9721 ASSERT(kSmiShiftSize == 0);
9722 ASSERT(IsPowerOf2(String::kMaxAsciiCharCode + 1));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009723 __ tst(code_,
9724 Operand(kSmiTagMask |
9725 ((~String::kMaxAsciiCharCode) << kSmiTagSize)));
9726 __ b(nz, &slow_case_);
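  // Informal note on the combined mask test above: a valid input is a smi
  // whose untagged value is at most String::kMaxAsciiCharCode, i.e. the raw
  // word is code << kSmiTagSize with code in [0, kMaxAsciiCharCode]. Such a
  // word shares no bits with
  //   kSmiTagMask | ((~String::kMaxAsciiCharCode) << kSmiTagSize),
  // so a non-zero tst result ('nz') means the value is either not a smi or a
  // char code above kMaxAsciiCharCode, and we take the slow case.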
Steve Block6ded16b2010-05-10 14:33:55 +01009727
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009728 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
9729 // At this point code register contains smi tagged ascii char code.
Steve Block6ded16b2010-05-10 14:33:55 +01009730 ASSERT(kSmiTag == 0);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009731 __ add(result_, result_, Operand(code_, LSL, kPointerSizeLog2 - kSmiTagSize));
9732 __ ldr(result_, FieldMemOperand(result_, FixedArray::kHeaderSize));
9733 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
9734 __ cmp(result_, Operand(ip));
9735 __ b(eq, &slow_case_);
9736 __ bind(&exit_);
9737}
Steve Block6ded16b2010-05-10 14:33:55 +01009738
Steve Block6ded16b2010-05-10 14:33:55 +01009739
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009740void StringCharFromCodeGenerator::GenerateSlow(
9741 MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
9742 __ Abort("Unexpected fallthrough to CharFromCode slow case");
9743
9744 __ bind(&slow_case_);
9745 call_helper.BeforeCall(masm);
9746 __ push(code_);
9747 __ CallRuntime(Runtime::kCharFromCode, 1);
9748 if (!result_.is(r0)) {
9749 __ mov(result_, r0);
Steve Block6ded16b2010-05-10 14:33:55 +01009750 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009751 call_helper.AfterCall(masm);
9752 __ jmp(&exit_);
9753
9754 __ Abort("Unexpected fallthrough from CharFromCode slow case");
9755}
9756
9757
9758// -------------------------------------------------------------------------
9759// StringCharAtGenerator
9760
9761void StringCharAtGenerator::GenerateFast(MacroAssembler* masm) {
9762 char_code_at_generator_.GenerateFast(masm);
9763 char_from_code_generator_.GenerateFast(masm);
9764}
9765
9766
9767void StringCharAtGenerator::GenerateSlow(
9768 MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
9769 char_code_at_generator_.GenerateSlow(masm, call_helper);
9770 char_from_code_generator_.GenerateSlow(masm, call_helper);
Steve Block6ded16b2010-05-10 14:33:55 +01009771}
9772
9773
9774void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
9775 Register dest,
9776 Register src,
9777 Register count,
9778 Register scratch,
9779 bool ascii) {
Andrei Popescu31002712010-02-23 13:46:05 +00009780 Label loop;
9781 Label done;
9782 // This loop just copies one character at a time, as it is only used for very
9783 // short strings.
9784 if (!ascii) {
9785 __ add(count, count, Operand(count), SetCC);
9786 } else {
9787 __ cmp(count, Operand(0));
9788 }
9789 __ b(eq, &done);
9790
9791 __ bind(&loop);
9792 __ ldrb(scratch, MemOperand(src, 1, PostIndex));
9793 // Perform sub between load and dependent store to get the load time to
9794 // complete.
9795 __ sub(count, count, Operand(1), SetCC);
9796 __ strb(scratch, MemOperand(dest, 1, PostIndex));
9797 // last iteration.
9798 __ b(gt, &loop);
9799
9800 __ bind(&done);
9801}
9802
9803
9804enum CopyCharactersFlags {
9805 COPY_ASCII = 1,
9806 DEST_ALWAYS_ALIGNED = 2
9807};
9808
9809
Steve Block6ded16b2010-05-10 14:33:55 +01009810void StringHelper::GenerateCopyCharactersLong(MacroAssembler* masm,
9811 Register dest,
9812 Register src,
9813 Register count,
9814 Register scratch1,
9815 Register scratch2,
9816 Register scratch3,
9817 Register scratch4,
9818 Register scratch5,
9819 int flags) {
Andrei Popescu31002712010-02-23 13:46:05 +00009820 bool ascii = (flags & COPY_ASCII) != 0;
9821 bool dest_always_aligned = (flags & DEST_ALWAYS_ALIGNED) != 0;
9822
9823 if (dest_always_aligned && FLAG_debug_code) {
9824 // Check that destination is actually word aligned if the flag says
9825 // that it is.
9826 __ tst(dest, Operand(kPointerAlignmentMask));
9827 __ Check(eq, "Destination of copy not aligned.");
9828 }
9829
9830 const int kReadAlignment = 4;
9831 const int kReadAlignmentMask = kReadAlignment - 1;
9832 // Ensure that reading an entire aligned word containing the last character
9833 // of a string will not read outside the allocated area (because we pad up
9834 // to kObjectAlignment).
9835 ASSERT(kObjectAlignment >= kReadAlignment);
9836 // Assumes word reads and writes are little endian.
9837 // Nothing to do for zero characters.
9838 Label done;
9839 if (!ascii) {
9840 __ add(count, count, Operand(count), SetCC);
9841 } else {
9842 __ cmp(count, Operand(0));
9843 }
9844 __ b(eq, &done);
9845
9846 // Assume that you cannot read (or write) unaligned.
9847 Label byte_loop;
9848 // Must copy at least eight bytes, otherwise just do it one byte at a time.
9849 __ cmp(count, Operand(8));
9850 __ add(count, dest, Operand(count));
9851 Register limit = count; // Read until src equals this.
9852 __ b(lt, &byte_loop);
9853
9854 if (!dest_always_aligned) {
9855 // Align dest by byte copying. Copies between zero and three bytes.
9856 __ and_(scratch4, dest, Operand(kReadAlignmentMask), SetCC);
9857 Label dest_aligned;
9858 __ b(eq, &dest_aligned);
9859 __ cmp(scratch4, Operand(2));
9860 __ ldrb(scratch1, MemOperand(src, 1, PostIndex));
9861 __ ldrb(scratch2, MemOperand(src, 1, PostIndex), le);
9862 __ ldrb(scratch3, MemOperand(src, 1, PostIndex), lt);
9863 __ strb(scratch1, MemOperand(dest, 1, PostIndex));
9864 __ strb(scratch2, MemOperand(dest, 1, PostIndex), le);
9865 __ strb(scratch3, MemOperand(dest, 1, PostIndex), lt);
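    // Informal note on the conditional copies above: scratch4 holds dest & 3
    // (1, 2 or 3 at this point). Comparing it with 2 makes the 'le' pair copy
    // a second byte when the misalignment is 1 or 2, and the 'lt' pair a third
    // byte when it is 1, so exactly 4 - (dest & 3) bytes are copied and dest
    // becomes word aligned.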
9866 __ bind(&dest_aligned);
9867 }
9868
9869 Label simple_loop;
9870
9871 __ sub(scratch4, dest, Operand(src));
9872 __ and_(scratch4, scratch4, Operand(0x03), SetCC);
9873 __ b(eq, &simple_loop);
9874 // Shift register is number of bits in a source word that
9875 // must be combined with bits in the next source word in order
9876 // to create a destination word.
9877
9878 // Complex loop for src/dst that are not aligned the same way.
9879 {
9880 Label loop;
9881 __ mov(scratch4, Operand(scratch4, LSL, 3));
9882 Register left_shift = scratch4;
9883 __ and_(src, src, Operand(~3)); // Round down to load previous word.
9884 __ ldr(scratch1, MemOperand(src, 4, PostIndex));
9885 // Store the "shift" most significant bits of scratch in the least
9886 // significant bits (i.e., shift down by (32-shift)).
9887 __ rsb(scratch2, left_shift, Operand(32));
9888 Register right_shift = scratch2;
9889 __ mov(scratch1, Operand(scratch1, LSR, right_shift));
9890
9891 __ bind(&loop);
9892 __ ldr(scratch3, MemOperand(src, 4, PostIndex));
9893 __ sub(scratch5, limit, Operand(dest));
9894 __ orr(scratch1, scratch1, Operand(scratch3, LSL, left_shift));
9895 __ str(scratch1, MemOperand(dest, 4, PostIndex));
9896 __ mov(scratch1, Operand(scratch3, LSR, right_shift));
9897 // Loop if four or more bytes left to copy.
9898 // Compare to eight, because we did the subtract before increasing dst.
9899 __ sub(scratch5, scratch5, Operand(8), SetCC);
9900 __ b(ge, &loop);
9901 }
9902 // There is now between zero and three bytes left to copy (the negative of that
9903 // number is in scratch5), and between one and three bytes already read into
9904 // scratch1 (eight times that number in scratch4). We may have read past
9905 // the end of the string, but because objects are aligned, we have not read
9906 // past the end of the object.
9907 // Find the minimum of remaining characters to move and preloaded characters
9908 // and write those as bytes.
9909 __ add(scratch5, scratch5, Operand(4), SetCC);
9910 __ b(eq, &done);
9911 __ cmp(scratch4, Operand(scratch5, LSL, 3), ne);
9912 // Move the minimum of bytes read and bytes left to copy to scratch5.
9913 __ mov(scratch5, Operand(scratch4, LSR, 3), LeaveCC, lt);
9914 // Between one and three (value in scratch5) characters already read into
9915 // scratch ready to write.
9916 __ cmp(scratch5, Operand(2));
9917 __ strb(scratch1, MemOperand(dest, 1, PostIndex));
9918 __ mov(scratch1, Operand(scratch1, LSR, 8), LeaveCC, ge);
9919 __ strb(scratch1, MemOperand(dest, 1, PostIndex), ge);
9920 __ mov(scratch1, Operand(scratch1, LSR, 8), LeaveCC, gt);
9921 __ strb(scratch1, MemOperand(dest, 1, PostIndex), gt);
9922 // Copy any remaining bytes.
9923 __ b(&byte_loop);
9924
9925 // Simple loop.
9926 // Copy words from src to dst, until less than four bytes left.
9927 // Both src and dest are word aligned.
9928 __ bind(&simple_loop);
9929 {
9930 Label loop;
9931 __ bind(&loop);
9932 __ ldr(scratch1, MemOperand(src, 4, PostIndex));
9933 __ sub(scratch3, limit, Operand(dest));
9934 __ str(scratch1, MemOperand(dest, 4, PostIndex));
9935 // Compare to 8, not 4, because we do the subtraction before increasing
9936 // dest.
9937 __ cmp(scratch3, Operand(8));
9938 __ b(ge, &loop);
9939 }
9940
9941 // Copy bytes from src to dst until dst hits limit.
9942 __ bind(&byte_loop);
9943 __ cmp(dest, Operand(limit));
9944 __ ldrb(scratch1, MemOperand(src, 1, PostIndex), lt);
9945 __ b(ge, &done);
9946 __ strb(scratch1, MemOperand(dest, 1, PostIndex));
9947 __ b(&byte_loop);
9948
9949 __ bind(&done);
9950}
9951
9952
Steve Block6ded16b2010-05-10 14:33:55 +01009953void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
9954 Register c1,
9955 Register c2,
9956 Register scratch1,
9957 Register scratch2,
9958 Register scratch3,
9959 Register scratch4,
9960 Register scratch5,
9961 Label* not_found) {
9962 // Register scratch3 is the general scratch register in this function.
9963 Register scratch = scratch3;
9964
9965 // Make sure that both characters are not digits, as such strings have a
9966 // different hash algorithm. Don't try to look for these in the symbol table.
9967 Label not_array_index;
9968 __ sub(scratch, c1, Operand(static_cast<int>('0')));
9969 __ cmp(scratch, Operand(static_cast<int>('9' - '0')));
9970 __ b(hi, &not_array_index);
9971 __ sub(scratch, c2, Operand(static_cast<int>('0')));
9972 __ cmp(scratch, Operand(static_cast<int>('9' - '0')));
9973
9974 // If the check failed, combine both characters into a single halfword.
9975 // This is required by the contract of the method: code at the
9976 // not_found branch expects this combination in the c1 register.
9977 __ orr(c1, c1, Operand(c2, LSL, kBitsPerByte), LeaveCC, ls);
9978 __ b(ls, not_found);
9979
9980 __ bind(&not_array_index);
9981 // Calculate the two character string hash.
9982 Register hash = scratch1;
9983 StringHelper::GenerateHashInit(masm, hash, c1);
9984 StringHelper::GenerateHashAddCharacter(masm, hash, c2);
9985 StringHelper::GenerateHashGetHash(masm, hash);
9986
9987 // Collect the two characters in a register.
9988 Register chars = c1;
9989 __ orr(chars, chars, Operand(c2, LSL, kBitsPerByte));
9990
9991 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
9992 // hash: hash of two character string.
9993
9994 // Load symbol table
9995 // Load address of first element of the symbol table.
9996 Register symbol_table = c2;
9997 __ LoadRoot(symbol_table, Heap::kSymbolTableRootIndex);
9998
9999 // Load undefined value
10000 Register undefined = scratch4;
10001 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
10002
10003 // Calculate capacity mask from the symbol table capacity.
10004 Register mask = scratch2;
10005 __ ldr(mask, FieldMemOperand(symbol_table, SymbolTable::kCapacityOffset));
10006 __ mov(mask, Operand(mask, ASR, 1));
10007 __ sub(mask, mask, Operand(1));
10008
10009 // Calculate untagged address of the first element of the symbol table.
10010 Register first_symbol_table_element = symbol_table;
10011 __ add(first_symbol_table_element, symbol_table,
10012 Operand(SymbolTable::kElementsStartOffset - kHeapObjectTag));
10013
10014 // Registers
10015 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
10016 // hash: hash of two character string
10017 // mask: capacity mask
10018 // first_symbol_table_element: address of the first element of
10019 // the symbol table
10020 // scratch: -
10021
10022 // Perform a number of probes in the symbol table.
10023 static const int kProbes = 4;
10024 Label found_in_symbol_table;
10025 Label next_probe[kProbes];
10026 for (int i = 0; i < kProbes; i++) {
10027 Register candidate = scratch5; // Scratch register contains candidate.
10028
10029 // Calculate entry in symbol table.
10030 if (i > 0) {
10031 __ add(candidate, hash, Operand(SymbolTable::GetProbeOffset(i)));
10032 } else {
10033 __ mov(candidate, hash);
10034 }
10035
10036 __ and_(candidate, candidate, Operand(mask));
10037
10038 // Load the entry from the symbol table.
10039 ASSERT_EQ(1, SymbolTable::kEntrySize);
10040 __ ldr(candidate,
10041 MemOperand(first_symbol_table_element,
10042 candidate,
10043 LSL,
10044 kPointerSizeLog2));
10045
10046 // If entry is undefined no string with this hash can be found.
10047 __ cmp(candidate, undefined);
10048 __ b(eq, not_found);
10049
10050 // If length is not 2 the string is not a candidate.
10051 __ ldr(scratch, FieldMemOperand(candidate, String::kLengthOffset));
10052 __ cmp(scratch, Operand(Smi::FromInt(2)));
10053 __ b(ne, &next_probe[i]);
10054
10055 // Check that the candidate is a non-external ascii string.
10056 __ ldr(scratch, FieldMemOperand(candidate, HeapObject::kMapOffset));
10057 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
10058 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch,
10059 &next_probe[i]);
10060
10061 // Check if the two characters match.
10062 // Assumes that word load is little endian.
10063 __ ldrh(scratch, FieldMemOperand(candidate, SeqAsciiString::kHeaderSize));
10064 __ cmp(chars, scratch);
10065 __ b(eq, &found_in_symbol_table);
10066 __ bind(&next_probe[i]);
10067 }
10068
10069 // No matching 2 character string found by probing.
10070 __ jmp(not_found);
10071
10072 // Scratch register contains result when we fall through to here.
10073 Register result = scratch;
10074 __ bind(&found_in_symbol_table);
10075 __ Move(r0, result);
10076}
10077
10078
10079void StringHelper::GenerateHashInit(MacroAssembler* masm,
10080 Register hash,
10081 Register character) {
10082 // hash = character + (character << 10);
10083 __ add(hash, character, Operand(character, LSL, 10));
10084 // hash ^= hash >> 6;
10085 __ eor(hash, hash, Operand(hash, ASR, 6));
10086}
10087
10088
10089void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
10090 Register hash,
10091 Register character) {
10092 // hash += character;
10093 __ add(hash, hash, Operand(character));
10094 // hash += hash << 10;
10095 __ add(hash, hash, Operand(hash, LSL, 10));
10096 // hash ^= hash >> 6;
10097 __ eor(hash, hash, Operand(hash, ASR, 6));
10098}
10099
10100
10101void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
10102 Register hash) {
10103 // hash += hash << 3;
10104 __ add(hash, hash, Operand(hash, LSL, 3));
10105 // hash ^= hash >> 11;
10106 __ eor(hash, hash, Operand(hash, ASR, 11));
10107 // hash += hash << 15;
10108 __ add(hash, hash, Operand(hash, LSL, 15), SetCC);
10109
10110 // if (hash == 0) hash = 27;
10111 __ mov(hash, Operand(27), LeaveCC, nz);
10112}
10113
10114
Andrei Popescu31002712010-02-23 13:46:05 +000010115void SubStringStub::Generate(MacroAssembler* masm) {
10116 Label runtime;
10117
10118 // Stack frame on entry.
10119 // lr: return address
10120 // sp[0]: to
10121 // sp[4]: from
10122 // sp[8]: string
10123
10124 // This stub is called from the native-call %_SubString(...), so
10125 // nothing can be assumed about the arguments. It is tested that:
10126 // "string" is a sequential string,
10127 // both "from" and "to" are smis, and
10128 // 0 <= from <= to <= string.length.
10129 // If any of these assumptions fail, we call the runtime system.
10130
10131 static const int kToOffset = 0 * kPointerSize;
10132 static const int kFromOffset = 1 * kPointerSize;
10133 static const int kStringOffset = 2 * kPointerSize;
10134
10135
10136 // Check bounds and smi-ness.
10137 __ ldr(r7, MemOperand(sp, kToOffset));
10138 __ ldr(r6, MemOperand(sp, kFromOffset));
10139 ASSERT_EQ(0, kSmiTag);
10140 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
10141 // I.e., arithmetic shift right by one un-smi-tags.
10142 __ mov(r2, Operand(r7, ASR, 1), SetCC);
10143 __ mov(r3, Operand(r6, ASR, 1), SetCC, cc);
10144 // If either r2 or r6 had the smi tag bit set, then carry is set now.
10145 __ b(cs, &runtime); // Either "from" or "to" is not a smi.
10146 __ b(mi, &runtime); // From is negative.
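  // Informal note on the smi checks above: ASR #1 with SetCC shifts the low
  // bit (the smi tag) of "to" into the carry flag; the second mov is executed
  // only when carry is clear, so afterwards carry is set if and only if "to"
  // or "from" was not a smi, which is what the 'cs' branch tests.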
10147
10148 __ sub(r2, r2, Operand(r3), SetCC);
10149 __ b(mi, &runtime); // Fail if from > to.
Steve Block6ded16b2010-05-10 14:33:55 +010010150 // Special handling of sub-strings of length 1 and 2. One character strings
10151 // are handled in the runtime system (looked up in the single character
10152 // cache). Two character strings are looked up in the symbol table.
Andrei Popescu31002712010-02-23 13:46:05 +000010153 __ cmp(r2, Operand(2));
Steve Block6ded16b2010-05-10 14:33:55 +010010154 __ b(lt, &runtime);
Andrei Popescu31002712010-02-23 13:46:05 +000010155
10156 // r2: length
Steve Block6ded16b2010-05-10 14:33:55 +010010157 // r3: from index (untagged smi)
Andrei Popescu31002712010-02-23 13:46:05 +000010158 // r6: from (smi)
10159 // r7: to (smi)
10160
10161 // Make sure first argument is a sequential (or flat) string.
10162 __ ldr(r5, MemOperand(sp, kStringOffset));
10163 ASSERT_EQ(0, kSmiTag);
10164 __ tst(r5, Operand(kSmiTagMask));
10165 __ b(eq, &runtime);
10166 Condition is_string = masm->IsObjectStringType(r5, r1);
10167 __ b(NegateCondition(is_string), &runtime);
10168
10169 // r1: instance type
10170 // r2: length
Steve Block6ded16b2010-05-10 14:33:55 +010010171 // r3: from index (untagged smi)
Andrei Popescu31002712010-02-23 13:46:05 +000010172 // r5: string
10173 // r6: from (smi)
10174 // r7: to (smi)
10175 Label seq_string;
10176 __ and_(r4, r1, Operand(kStringRepresentationMask));
10177 ASSERT(kSeqStringTag < kConsStringTag);
10178 ASSERT(kExternalStringTag > kConsStringTag);
10179 __ cmp(r4, Operand(kConsStringTag));
10180 __ b(gt, &runtime); // External strings go to runtime.
10181 __ b(lt, &seq_string); // Sequential strings are handled directly.
10182
10183 // Cons string. Try to recurse (once) on the first substring.
10184 // (This adds a little more generality than necessary to handle flattened
10185 // cons strings, but not much).
10186 __ ldr(r5, FieldMemOperand(r5, ConsString::kFirstOffset));
10187 __ ldr(r4, FieldMemOperand(r5, HeapObject::kMapOffset));
10188 __ ldrb(r1, FieldMemOperand(r4, Map::kInstanceTypeOffset));
10189 __ tst(r1, Operand(kStringRepresentationMask));
10190 ASSERT_EQ(0, kSeqStringTag);
10191 __ b(ne, &runtime); // Cons and External strings go to runtime.
10192
10193 // Definitely a sequential string.
10194 __ bind(&seq_string);
10195
10196 // r1: instance type.
10197 // r2: length
Steve Block6ded16b2010-05-10 14:33:55 +010010198 // r3: from index (untagged smi)
Andrei Popescu31002712010-02-23 13:46:05 +000010199 // r5: string
10200 // r6: from (smi)
10201 // r7: to (smi)
10202 __ ldr(r4, FieldMemOperand(r5, String::kLengthOffset));
Steve Block6ded16b2010-05-10 14:33:55 +010010203 __ cmp(r4, Operand(r7));
Andrei Popescu31002712010-02-23 13:46:05 +000010204 __ b(lt, &runtime); // Fail if to > length.
10205
10206 // r1: instance type.
10207 // r2: result string length.
Steve Block6ded16b2010-05-10 14:33:55 +010010208 // r3: from index (untagged smi)
Andrei Popescu31002712010-02-23 13:46:05 +000010209 // r5: string.
10210 // r6: from offset (smi)
10211 // Check for flat ascii string.
10212 Label non_ascii_flat;
10213 __ tst(r1, Operand(kStringEncodingMask));
10214 ASSERT_EQ(0, kTwoByteStringTag);
10215 __ b(eq, &non_ascii_flat);
10216
Steve Block6ded16b2010-05-10 14:33:55 +010010217 Label result_longer_than_two;
10218 __ cmp(r2, Operand(2));
10219 __ b(gt, &result_longer_than_two);
10220
10221 // Sub string of length 2 requested.
10222 // Get the two characters forming the sub string.
10223 __ add(r5, r5, Operand(r3));
10224 __ ldrb(r3, FieldMemOperand(r5, SeqAsciiString::kHeaderSize));
10225 __ ldrb(r4, FieldMemOperand(r5, SeqAsciiString::kHeaderSize + 1));
10226
10227 // Try to lookup two character string in symbol table.
10228 Label make_two_character_string;
10229 StringHelper::GenerateTwoCharacterSymbolTableProbe(
10230 masm, r3, r4, r1, r5, r6, r7, r9, &make_two_character_string);
10231 __ IncrementCounter(&Counters::sub_string_native, 1, r3, r4);
10232 __ add(sp, sp, Operand(3 * kPointerSize));
10233 __ Ret();
10234
10235 // r2: result string length.
10236 // r3: two characters combined into halfword in little endian byte order.
10237 __ bind(&make_two_character_string);
10238 __ AllocateAsciiString(r0, r2, r4, r5, r9, &runtime);
10239 __ strh(r3, FieldMemOperand(r0, SeqAsciiString::kHeaderSize));
10240 __ IncrementCounter(&Counters::sub_string_native, 1, r3, r4);
10241 __ add(sp, sp, Operand(3 * kPointerSize));
10242 __ Ret();
10243
10244 __ bind(&result_longer_than_two);
10245
Andrei Popescu31002712010-02-23 13:46:05 +000010246 // Allocate the result.
10247 __ AllocateAsciiString(r0, r2, r3, r4, r1, &runtime);
10248
10249 // r0: result string.
10250 // r2: result string length.
10251 // r5: string.
10252 // r6: from offset (smi)
10253 // Locate first character of result.
10254 __ add(r1, r0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
10255 // Locate 'from' character of string.
10256 __ add(r5, r5, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
10257 __ add(r5, r5, Operand(r6, ASR, 1));
10258
10259 // r0: result string.
10260 // r1: first character of result string.
10261 // r2: result string length.
10262 // r5: first character of sub string to copy.
10263 ASSERT_EQ(0, SeqAsciiString::kHeaderSize & kObjectAlignmentMask);
Steve Block6ded16b2010-05-10 14:33:55 +010010264 StringHelper::GenerateCopyCharactersLong(masm, r1, r5, r2, r3, r4, r6, r7, r9,
10265 COPY_ASCII | DEST_ALWAYS_ALIGNED);
Andrei Popescu31002712010-02-23 13:46:05 +000010266 __ IncrementCounter(&Counters::sub_string_native, 1, r3, r4);
10267 __ add(sp, sp, Operand(3 * kPointerSize));
10268 __ Ret();
10269
10270 __ bind(&non_ascii_flat);
10271 // r2: result string length.
10272 // r5: string.
10273 // r6: from offset (smi)
10274 // Check for flat two byte string.
10275
10276 // Allocate the result.
10277 __ AllocateTwoByteString(r0, r2, r1, r3, r4, &runtime);
10278
10279 // r0: result string.
10280 // r2: result string length.
10281 // r5: string.
10282 // Locate first character of result.
10283 __ add(r1, r0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
10284 // Locate 'from' character of string.
10285 __ add(r5, r5, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
10286 // As "from" is a smi it is 2 times the value which matches the size of a two
10287 // byte character.
10288 __ add(r5, r5, Operand(r6));
10289
10290 // r0: result string.
10291 // r1: first character of result.
10292 // r2: result length.
10293 // r5: first character of string to copy.
10294 ASSERT_EQ(0, SeqTwoByteString::kHeaderSize & kObjectAlignmentMask);
Steve Block6ded16b2010-05-10 14:33:55 +010010295 StringHelper::GenerateCopyCharactersLong(masm, r1, r5, r2, r3, r4, r6, r7, r9,
10296 DEST_ALWAYS_ALIGNED);
Andrei Popescu31002712010-02-23 13:46:05 +000010297 __ IncrementCounter(&Counters::sub_string_native, 1, r3, r4);
10298 __ add(sp, sp, Operand(3 * kPointerSize));
10299 __ Ret();
10300
10301 // Just jump to runtime to create the sub string.
10302 __ bind(&runtime);
Steve Block6ded16b2010-05-10 14:33:55 +010010303 __ TailCallRuntime(Runtime::kSubString, 3, 1);
Andrei Popescu31002712010-02-23 13:46:05 +000010304}
Leon Clarked91b9f72010-01-27 17:25:45 +000010305
10306
10307void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
10308 Register left,
10309 Register right,
10310 Register scratch1,
10311 Register scratch2,
10312 Register scratch3,
10313 Register scratch4) {
10314 Label compare_lengths;
10315 // Find minimum length and length difference.
10316 __ ldr(scratch1, FieldMemOperand(left, String::kLengthOffset));
10317 __ ldr(scratch2, FieldMemOperand(right, String::kLengthOffset));
10318 __ sub(scratch3, scratch1, Operand(scratch2), SetCC);
10319 Register length_delta = scratch3;
10320 __ mov(scratch1, scratch2, LeaveCC, gt);
10321 Register min_length = scratch1;
Steve Block6ded16b2010-05-10 14:33:55 +010010322 ASSERT(kSmiTag == 0);
Leon Clarked91b9f72010-01-27 17:25:45 +000010323 __ tst(min_length, Operand(min_length));
10324 __ b(eq, &compare_lengths);
10325
Steve Block6ded16b2010-05-10 14:33:55 +010010326 // Untag smi.
10327 __ mov(min_length, Operand(min_length, ASR, kSmiTagSize));
10328
Leon Clarked91b9f72010-01-27 17:25:45 +000010329 // Set up registers so that we only need to increment one register
10330 // in the loop.
10331 __ add(scratch2, min_length,
10332 Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
10333 __ add(left, left, Operand(scratch2));
10334 __ add(right, right, Operand(scratch2));
10335 // Registers left and right point to the min_length character of the strings.
10336 __ rsb(min_length, min_length, Operand(-1));
10337 Register index = min_length;
10338 // Index starts at -min_length.
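  // Small worked example (informal): with min_length == 3 the index takes the
  // values -3, -2 and -1 while characters are compared; once the pre-increment
  // reaches 0 the conditional loads are skipped and the branch to
  // compare_lengths is taken with 'eq' still set, so the smi length difference
  // decides the result.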
10339
10340 {
10341 // Compare loop.
10342 Label loop;
10343 __ bind(&loop);
10344 // Compare characters.
10345 __ add(index, index, Operand(1), SetCC);
10346 __ ldrb(scratch2, MemOperand(left, index), ne);
10347 __ ldrb(scratch4, MemOperand(right, index), ne);
10348 // Skip to compare lengths with eq condition true.
10349 __ b(eq, &compare_lengths);
10350 __ cmp(scratch2, scratch4);
10351 __ b(eq, &loop);
10352 // Fallthrough with eq condition false.
10353 }
10354 // Compare lengths - strings up to min-length are equal.
10355 __ bind(&compare_lengths);
10356 ASSERT(Smi::FromInt(EQUAL) == static_cast<Smi*>(0));
10357 // Use zero length_delta as result.
10358 __ mov(r0, Operand(length_delta), SetCC, eq);
10359 // Fall through to here if characters compare not-equal.
10360 __ mov(r0, Operand(Smi::FromInt(GREATER)), LeaveCC, gt);
10361 __ mov(r0, Operand(Smi::FromInt(LESS)), LeaveCC, lt);
10362 __ Ret();
10363}
10364
10365
10366void StringCompareStub::Generate(MacroAssembler* masm) {
10367 Label runtime;
10368
10369 // Stack frame on entry.
Andrei Popescu31002712010-02-23 13:46:05 +000010370 // sp[0]: right string
10371 // sp[4]: left string
10372 __ ldr(r0, MemOperand(sp, 1 * kPointerSize)); // left
10373 __ ldr(r1, MemOperand(sp, 0 * kPointerSize)); // right
Leon Clarked91b9f72010-01-27 17:25:45 +000010374
10375 Label not_same;
10376 __ cmp(r0, r1);
10377 __ b(ne, &not_same);
10378 ASSERT_EQ(0, EQUAL);
10379 ASSERT_EQ(0, kSmiTag);
10380 __ mov(r0, Operand(Smi::FromInt(EQUAL)));
10381 __ IncrementCounter(&Counters::string_compare_native, 1, r1, r2);
10382 __ add(sp, sp, Operand(2 * kPointerSize));
10383 __ Ret();
10384
10385 __ bind(&not_same);
10386
10387 // Check that both objects are sequential ascii strings.
10388 __ JumpIfNotBothSequentialAsciiStrings(r0, r1, r2, r3, &runtime);
10389
10390 // Compare flat ascii strings natively. Remove arguments from stack first.
10391 __ IncrementCounter(&Counters::string_compare_native, 1, r2, r3);
10392 __ add(sp, sp, Operand(2 * kPointerSize));
10393 GenerateCompareFlatAsciiStrings(masm, r0, r1, r2, r3, r4, r5);
10394
10395 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
10396 // tagged as a small integer.
10397 __ bind(&runtime);
Steve Block6ded16b2010-05-10 14:33:55 +010010398 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
Leon Clarked91b9f72010-01-27 17:25:45 +000010399}
10400
10401
Andrei Popescu31002712010-02-23 13:46:05 +000010402void StringAddStub::Generate(MacroAssembler* masm) {
10403 Label string_add_runtime;
10404 // Stack on entry:
10405 // sp[0]: second argument.
10406 // sp[4]: first argument.
10407
10408 // Load the two arguments.
10409 __ ldr(r0, MemOperand(sp, 1 * kPointerSize)); // First argument.
10410 __ ldr(r1, MemOperand(sp, 0 * kPointerSize)); // Second argument.
10411
10412 // Make sure that both arguments are strings if not known in advance.
10413 if (string_check_) {
10414 ASSERT_EQ(0, kSmiTag);
10415 __ JumpIfEitherSmi(r0, r1, &string_add_runtime);
10416 // Load instance types.
10417 __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
10418 __ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset));
10419 __ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
10420 __ ldrb(r5, FieldMemOperand(r5, Map::kInstanceTypeOffset));
10421 ASSERT_EQ(0, kStringTag);
10422 // If either is not a string, go to runtime.
10423 __ tst(r4, Operand(kIsNotStringMask));
10424 __ tst(r5, Operand(kIsNotStringMask), eq);
10425 __ b(ne, &string_add_runtime);
10426 }
10427
10428 // Both arguments are strings.
10429 // r0: first string
10430 // r1: second string
10431 // r4: first string instance type (if string_check_)
10432 // r5: second string instance type (if string_check_)
10433 {
10434 Label strings_not_empty;
10435 // Check if either of the strings is empty. In that case return the other.
10436 __ ldr(r2, FieldMemOperand(r0, String::kLengthOffset));
10437 __ ldr(r3, FieldMemOperand(r1, String::kLengthOffset));
Steve Block6ded16b2010-05-10 14:33:55 +010010438 ASSERT(kSmiTag == 0);
10439 __ cmp(r2, Operand(Smi::FromInt(0))); // Test if first string is empty.
Andrei Popescu31002712010-02-23 13:46:05 +000010440 __ mov(r0, Operand(r1), LeaveCC, eq); // If first is empty, return second.
Steve Block6ded16b2010-05-10 14:33:55 +010010441 ASSERT(kSmiTag == 0);
10442 // Else test if second string is empty.
10443 __ cmp(r3, Operand(Smi::FromInt(0)), ne);
Andrei Popescu31002712010-02-23 13:46:05 +000010444 __ b(ne, &strings_not_empty); // If either string was empty, return r0.
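  // Informal note on the conditional sequence above: if the first string is
  // empty, r0 is replaced by r1 and the second cmp is skipped, so the branch
  // is not taken and the second string is returned. If the first string is
  // non-empty the second cmp runs, and 'ne' then means both strings are
  // non-empty, so we branch past the early return.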
10445
10446 __ IncrementCounter(&Counters::string_add_native, 1, r2, r3);
10447 __ add(sp, sp, Operand(2 * kPointerSize));
10448 __ Ret();
10449
10450 __ bind(&strings_not_empty);
10451 }
10452
Steve Block6ded16b2010-05-10 14:33:55 +010010453 __ mov(r2, Operand(r2, ASR, kSmiTagSize));
10454 __ mov(r3, Operand(r3, ASR, kSmiTagSize));
Andrei Popescu31002712010-02-23 13:46:05 +000010455 // Both strings are non-empty.
10456 // r0: first string
10457 // r1: second string
10458 // r2: length of first string
10459 // r3: length of second string
10460 // r4: first string instance type (if string_check_)
10461 // r5: second string instance type (if string_check_)
10462 // Look at the length of the result of adding the two strings.
Steve Block6ded16b2010-05-10 14:33:55 +010010463 Label string_add_flat_result, longer_than_two;
Andrei Popescu31002712010-02-23 13:46:05 +000010464 // Adding two lengths can't overflow.
10465 ASSERT(String::kMaxLength * 2 > String::kMaxLength);
10466 __ add(r6, r2, Operand(r3));
10467 // Use the runtime system when adding two one character strings, as it
10468 // contains optimizations for this specific case using the symbol table.
10469 __ cmp(r6, Operand(2));
Steve Block6ded16b2010-05-10 14:33:55 +010010470 __ b(ne, &longer_than_two);
10471
10472 // Check that both strings are non-external ascii strings.
10473 if (!string_check_) {
10474 __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
10475 __ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset));
10476 __ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
10477 __ ldrb(r5, FieldMemOperand(r5, Map::kInstanceTypeOffset));
10478 }
10479 __ JumpIfBothInstanceTypesAreNotSequentialAscii(r4, r5, r6, r7,
10480 &string_add_runtime);
10481
10482 // Get the two characters forming the sub string.
10483 __ ldrb(r2, FieldMemOperand(r0, SeqAsciiString::kHeaderSize));
10484 __ ldrb(r3, FieldMemOperand(r1, SeqAsciiString::kHeaderSize));
10485
10486 // Try to lookup two character string in symbol table. If it is not found
10487 // just allocate a new one.
10488 Label make_two_character_string;
10489 StringHelper::GenerateTwoCharacterSymbolTableProbe(
10490 masm, r2, r3, r6, r7, r4, r5, r9, &make_two_character_string);
10491 __ IncrementCounter(&Counters::string_add_native, 1, r2, r3);
10492 __ add(sp, sp, Operand(2 * kPointerSize));
10493 __ Ret();
10494
10495 __ bind(&make_two_character_string);
10496 // Resulting string has length 2 and first chars of two strings
10497 // are combined into single halfword in r2 register.
10498 // So we can fill resulting string without two loops by a single
10499 // halfword store instruction (which assumes that processor is
10500 // in a little endian mode)
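  // For example (assuming the probe above leaves the first character in the
  // low byte of r2): adding "a" (0x61) and "b" (0x62) gives r2 == 0x6261,
  // and the halfword store below writes the bytes 0x61, 0x62 in that order,
  // producing the ascii string "ab".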
  __ mov(r6, Operand(2));
  __ AllocateAsciiString(r0, r6, r4, r5, r9, &string_add_runtime);
  __ strh(r2, FieldMemOperand(r0, SeqAsciiString::kHeaderSize));
  __ IncrementCounter(&Counters::string_add_native, 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ Ret();

  __ bind(&longer_than_two);
  // Check if the resulting string will be flat.
  __ cmp(r6, Operand(String::kMinNonFlatLength));
  __ b(lt, &string_add_flat_result);
  // Handle exceptionally long strings in the runtime system.
  ASSERT((String::kMaxLength & 0x80000000) == 0);
  ASSERT(IsPowerOf2(String::kMaxLength + 1));
  // kMaxLength + 1 is representable as a shifted literal, kMaxLength is not.
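  // ARM data-processing immediates are an 8-bit value rotated right by an
  // even amount, so the single-bit value kMaxLength + 1 can be encoded
  // directly, while kMaxLength (a long run of ones) cannot. Branching on hs
  // against kMaxLength + 1 is equivalent to testing length > kMaxLength.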
  __ cmp(r6, Operand(String::kMaxLength + 1));
  __ b(hs, &string_add_runtime);

  // If the result is not supposed to be flat, allocate a cons string object.
  // If both strings are ascii, the result is an ascii cons string.
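  // A cons string simply records pointers to its two halves and defers the
  // actual character copy; it is flattened later if and when flat access to
  // the characters is needed.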
  if (!string_check_) {
    __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset));
    __ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
    __ ldrb(r5, FieldMemOperand(r5, Map::kInstanceTypeOffset));
  }
  Label non_ascii, allocated;
  ASSERT_EQ(0, kTwoByteStringTag);
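  // Since kTwoByteStringTag is zero, the encoding bit is set for ascii
  // strings. The second tst is predicated on ne (first string is ascii), so
  // the following beq is taken if either string is two-byte.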
  __ tst(r4, Operand(kStringEncodingMask));
  __ tst(r5, Operand(kStringEncodingMask), ne);
  __ b(eq, &non_ascii);

  // Allocate an ASCII cons string.
  __ AllocateAsciiConsString(r7, r6, r4, r5, &string_add_runtime);
  __ bind(&allocated);
  // Fill the fields of the cons string.
  __ str(r0, FieldMemOperand(r7, ConsString::kFirstOffset));
  __ str(r1, FieldMemOperand(r7, ConsString::kSecondOffset));
  __ mov(r0, Operand(r7));
  __ IncrementCounter(&Counters::string_add_native, 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ Ret();

  __ bind(&non_ascii);
  // Allocate a two byte cons string.
  __ AllocateTwoByteConsString(r7, r6, r4, r5, &string_add_runtime);
  __ jmp(&allocated);

  // Handle creating a flat result. First check that both strings are
  // sequential and that they have the same encoding.
  // r0: first string
  // r1: second string
  // r2: length of first string
  // r3: length of second string
  // r4: first string instance type (if string_check_)
  // r5: second string instance type (if string_check_)
  // r6: sum of lengths.
  __ bind(&string_add_flat_result);
  if (!string_check_) {
    __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset));
    __ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
    __ ldrb(r5, FieldMemOperand(r5, Map::kInstanceTypeOffset));
  }
  // Check that both strings are sequential.
  ASSERT_EQ(0, kSeqStringTag);
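  // As above, the predicated tst pair branches to the runtime if either
  // instance type has a non-sequential representation tag.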
  __ tst(r4, Operand(kStringRepresentationMask));
  __ tst(r5, Operand(kStringRepresentationMask), eq);
  __ b(ne, &string_add_runtime);
  // Now check if both strings have the same encoding (ASCII/Two-byte).
  // r0: first string.
  // r1: second string.
  // r2: length of first string.
  // r3: length of second string.
  // r6: sum of lengths.
  Label non_ascii_string_add_flat_result;
  ASSERT(IsPowerOf2(kStringEncodingMask));  // Just one bit to test.
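  // XOR-ing the two instance types leaves a one in the encoding bit exactly
  // when the strings have different encodings, in which case the slow path
  // is taken.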
  __ eor(r7, r4, Operand(r5));
  __ tst(r7, Operand(kStringEncodingMask));
  __ b(ne, &string_add_runtime);
  // And see if it's ASCII or two-byte.
  __ tst(r4, Operand(kStringEncodingMask));
  __ b(eq, &non_ascii_string_add_flat_result);

  // Both strings are sequential ASCII strings. We also know that they are
  // short (since the sum of the lengths is less than kMinNonFlatLength).
  // r6: length of resulting flat string
  __ AllocateAsciiString(r7, r6, r4, r5, r9, &string_add_runtime);
  // Locate first character of result.
  __ add(r6, r7, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // Locate first character of first argument.
  __ add(r0, r0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // r0: first character of first string.
  // r1: second string.
  // r2: length of first string.
  // r3: length of second string.
  // r6: first character of result.
  // r7: result string.
  StringHelper::GenerateCopyCharacters(masm, r6, r0, r2, r4, true);

  // Load second argument and locate first character.
  __ add(r1, r1, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // r1: first character of second string.
  // r3: length of second string.
  // r6: next character of result.
  // r7: result string.
  StringHelper::GenerateCopyCharacters(masm, r6, r1, r3, r4, true);
  __ mov(r0, Operand(r7));
  __ IncrementCounter(&Counters::string_add_native, 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ Ret();

  __ bind(&non_ascii_string_add_flat_result);
  // Both strings are sequential two byte strings.
  // r0: first string.
  // r1: second string.
  // r2: length of first string.
  // r3: length of second string.
  // r6: sum of the lengths of the two strings.
  __ AllocateTwoByteString(r7, r6, r4, r5, r9, &string_add_runtime);
  // r0: first string.
  // r1: second string.
  // r2: length of first string.
  // r3: length of second string.
  // r7: result string.

  // Locate first character of result.
  __ add(r6, r7, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // Locate first character of first argument.
  __ add(r0, r0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));

  // r0: first character of first string.
  // r1: second string.
  // r2: length of first string.
  // r3: length of second string.
  // r6: first character of result.
  // r7: result string.
  StringHelper::GenerateCopyCharacters(masm, r6, r0, r2, r4, false);

  // Locate first character of second argument.
  __ add(r1, r1, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));

  // r1: first character of second string.
  // r3: length of second string.
  // r6: next character of result (after copy of first string).
  // r7: result string.
  StringHelper::GenerateCopyCharacters(masm, r6, r1, r3, r4, false);

  __ mov(r0, Operand(r7));
  __ IncrementCounter(&Counters::string_add_native, 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ Ret();

  // Just jump to runtime to add the two strings.
  __ bind(&string_add_runtime);
  __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM