// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_ARM)

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "compiler.h"
#include "debug.h"
#include "ic-inl.h"
#include "jsregexp.h"
#include "jump-target-light-inl.h"
#include "parser.h"
#include "regexp-macro-assembler.h"
#include "regexp-stack.h"
#include "register-allocator-inl.h"
#include "runtime.h"
#include "scopes.h"
#include "virtual-frame-inl.h"
#include "virtual-frame-arm-inl.h"

namespace v8 {
namespace internal {


static void EmitIdenticalObjectComparison(MacroAssembler* masm,
                                          Label* slow,
                                          Condition cc,
                                          bool never_nan_nan);
static void EmitSmiNonsmiComparison(MacroAssembler* masm,
                                    Register lhs,
                                    Register rhs,
                                    Label* lhs_not_nan,
                                    Label* slow,
                                    bool strict);
static void EmitTwoNonNanDoubleComparison(MacroAssembler* masm, Condition cc);
static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
                                           Register lhs,
                                           Register rhs);
static void MultiplyByKnownInt(MacroAssembler* masm,
                               Register source,
                               Register destination,
                               int known_int);
static bool IsEasyToMultiplyBy(int x);


#define __ ACCESS_MASM(masm_)

// -------------------------------------------------------------------------
// Platform-specific DeferredCode functions.

void DeferredCode::SaveRegisters() {
  // On ARM you either have a completely spilled frame or you
  // handle it yourself, but at the moment there's no automation
  // of registers and deferred code.
}


void DeferredCode::RestoreRegisters() {
}


// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.

void VirtualFrameRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  frame_state_->frame()->AssertIsSpilled();
}


void VirtualFrameRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
}


void ICRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterInternalFrame();
}


void ICRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveInternalFrame();
}


// -------------------------------------------------------------------------
// CodeGenState implementation.

CodeGenState::CodeGenState(CodeGenerator* owner)
    : owner_(owner),
      previous_(owner->state()) {
  owner->set_state(this);
}


ConditionCodeGenState::ConditionCodeGenState(CodeGenerator* owner,
                                             JumpTarget* true_target,
                                             JumpTarget* false_target)
    : CodeGenState(owner),
      true_target_(true_target),
      false_target_(false_target) {
  owner->set_state(this);
}


TypeInfoCodeGenState::TypeInfoCodeGenState(CodeGenerator* owner,
                                           Slot* slot,
                                           TypeInfo type_info)
    : CodeGenState(owner),
      slot_(slot) {
  owner->set_state(this);
  old_type_info_ = owner->set_type_info(slot, type_info);
}


CodeGenState::~CodeGenState() {
  ASSERT(owner_->state() == this);
  owner_->set_state(previous_);
}


TypeInfoCodeGenState::~TypeInfoCodeGenState() {
  owner()->set_type_info(slot_, old_type_info_);
}

// -------------------------------------------------------------------------
// CodeGenerator implementation

CodeGenerator::CodeGenerator(MacroAssembler* masm)
    : deferred_(8),
      masm_(masm),
      info_(NULL),
      frame_(NULL),
      allocator_(NULL),
      cc_reg_(al),
      state_(NULL),
      loop_nesting_(0),
      type_info_(NULL),
      function_return_(JumpTarget::BIDIRECTIONAL),
      function_return_is_shadowed_(false) {
}


// Calling conventions:
// fp: caller's frame pointer
// sp: stack pointer
// r1: called JS function
// cp: callee's context

void CodeGenerator::Generate(CompilationInfo* info) {
  // Record the position for debugging purposes.
  CodeForFunctionPosition(info->function());
  Comment cmnt(masm_, "[ function compiled by virtual frame code generator");

  // Initialize state.
  info_ = info;

  int slots = scope()->num_parameters() + scope()->num_stack_slots();
  ScopedVector<TypeInfo> type_info_array(slots);
  type_info_ = &type_info_array;

  ASSERT(allocator_ == NULL);
  RegisterAllocator register_allocator(this);
  allocator_ = &register_allocator;
  ASSERT(frame_ == NULL);
  frame_ = new VirtualFrame();
  cc_reg_ = al;

  // Adjust for function-level loop nesting.
  ASSERT_EQ(0, loop_nesting_);
  loop_nesting_ = info->loop_nesting();

  {
    CodeGenState state(this);

    // Entry:
    // Stack: receiver, arguments
    // lr: return address
    // fp: caller's frame pointer
    // sp: stack pointer
    // r1: called JS function
    // cp: callee's context
    allocator_->Initialize();

#ifdef DEBUG
    if (strlen(FLAG_stop_at) > 0 &&
        info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
      frame_->SpillAll();
      __ stop("stop-at");
    }
#endif

    if (info->mode() == CompilationInfo::PRIMARY) {
      frame_->Enter();
      // tos: code slot

      // Allocate space for locals and initialize them.  This also checks
      // for stack overflow.
      frame_->AllocateStackSlots();

      frame_->AssertIsSpilled();
      int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
      if (heap_slots > 0) {
        // Allocate local context.
        // Get outer context and create a new context based on it.
        __ ldr(r0, frame_->Function());
        frame_->EmitPush(r0);
        if (heap_slots <= FastNewContextStub::kMaximumSlots) {
          FastNewContextStub stub(heap_slots);
          frame_->CallStub(&stub, 1);
        } else {
          frame_->CallRuntime(Runtime::kNewContext, 1);
        }

#ifdef DEBUG
        JumpTarget verified_true;
        __ cmp(r0, cp);
        verified_true.Branch(eq);
        __ stop("NewContext: r0 is expected to be the same as cp");
        verified_true.Bind();
#endif
        // Update context local.
        __ str(cp, frame_->Context());
      }

      // TODO(1241774): Improve this code:
      // 1) only needed if we have a context
      // 2) no need to recompute context ptr every single time
      // 3) don't copy parameter operand code from SlotOperand!
      {
        Comment cmnt2(masm_, "[ copy context parameters into .context");
        // Note that iteration order is relevant here! If we have the same
        // parameter twice (e.g., function (x, y, x)), and that parameter
        // needs to be copied into the context, it must be the last argument
        // passed to the parameter that needs to be copied. This is a rare
        // case so we don't check for it, instead we rely on the copying
        // order: such a parameter is copied repeatedly into the same
        // context location and thus the last value is what is seen inside
        // the function.
        frame_->AssertIsSpilled();
        for (int i = 0; i < scope()->num_parameters(); i++) {
          Variable* par = scope()->parameter(i);
          Slot* slot = par->slot();
          if (slot != NULL && slot->type() == Slot::CONTEXT) {
            ASSERT(!scope()->is_global_scope());  // No params in global scope.
            __ ldr(r1, frame_->ParameterAt(i));
            // Loads r2 with context; used below in RecordWrite.
            __ str(r1, SlotOperand(slot, r2));
            // Load the offset into r3.
            int slot_offset =
                FixedArray::kHeaderSize + slot->index() * kPointerSize;
            __ RecordWrite(r2, Operand(slot_offset), r3, r1);
          }
        }
      }

      // Store the arguments object.  This must happen after context
      // initialization because the arguments object may be stored in
      // the context.
      if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
        StoreArgumentsObject(true);
      }

      // Initialize ThisFunction reference if present.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        frame_->EmitPushRoot(Heap::kTheHoleValueRootIndex);
        StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT);
      }
    } else {
      // When used as the secondary compiler for splitting, r1, cp,
      // fp, and lr have been pushed on the stack.  Adjust the virtual
      // frame to match this state.
      frame_->Adjust(4);

      // Bind all the bailout labels to the beginning of the function.
      List<CompilationInfo::Bailout*>* bailouts = info->bailouts();
      for (int i = 0; i < bailouts->length(); i++) {
        __ bind(bailouts->at(i)->label());
      }
    }

    // Initialize the function return target after the locals are set
    // up, because it needs the expected frame height from the frame.
    function_return_.SetExpectedHeight();
    function_return_is_shadowed_ = false;

    // Generate code to 'execute' declarations and initialize functions
    // (source elements). In case of an illegal redeclaration we need to
    // handle that instead of processing the declarations.
    if (scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ illegal redeclarations");
      scope()->VisitIllegalRedeclaration(this);
    } else {
      Comment cmnt(masm_, "[ declarations");
      ProcessDeclarations(scope()->declarations());
      // Bail out if a stack-overflow exception occurred when processing
      // declarations.
      if (HasStackOverflow()) return;
    }

    if (FLAG_trace) {
      frame_->CallRuntime(Runtime::kTraceEnter, 0);
      // Ignore the return value.
    }

    // Compile the body of the function in a vanilla state. Don't
    // bother compiling all the code if the scope has an illegal
    // redeclaration.
    if (!scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ function body");
#ifdef DEBUG
      bool is_builtin = Bootstrapper::IsActive();
      bool should_trace =
          is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
      if (should_trace) {
        frame_->CallRuntime(Runtime::kDebugTrace, 0);
        // Ignore the return value.
      }
#endif
      VisitStatements(info->function()->body());
    }
  }

  // Handle the return from the function.
  if (has_valid_frame()) {
    // If there is a valid frame, control flow can fall off the end of
    // the body.  In that case there is an implicit return statement.
    ASSERT(!function_return_is_shadowed_);
    frame_->PrepareForReturn();
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    if (function_return_.is_bound()) {
      function_return_.Jump();
    } else {
      function_return_.Bind();
      GenerateReturnSequence();
    }
  } else if (function_return_.is_linked()) {
    // If the return target has dangling jumps to it, then we have not
    // yet generated the return sequence.  This can happen when (a)
    // control does not flow off the end of the body so we did not
    // compile an artificial return statement just above, and (b) there
    // are return statements in the body but (c) they are all shadowed.
    function_return_.Bind();
    GenerateReturnSequence();
  }

  // Adjust for function-level loop nesting.
  ASSERT(loop_nesting_ == info->loop_nesting());
  loop_nesting_ = 0;

  // Code generation state must be reset.
  ASSERT(!has_cc());
  ASSERT(state_ == NULL);
  ASSERT(loop_nesting() == 0);
  ASSERT(!function_return_is_shadowed_);
  function_return_.Unuse();
  DeleteFrame();

  // Process any deferred code using the register allocator.
  if (!HasStackOverflow()) {
    ProcessDeferred();
  }

  allocator_ = NULL;
  type_info_ = NULL;
}


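// Maps a slot to a dense slot number: parameters map to their parameter
// index and locals follow after the parameters (e.g. with two parameters,
// local 0 numbers as 2).  Any other slot type yields kInvalidSlotNumber.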
int CodeGenerator::NumberOfSlot(Slot* slot) {
  if (slot == NULL) return kInvalidSlotNumber;
  switch (slot->type()) {
    case Slot::PARAMETER:
      return slot->index();
    case Slot::LOCAL:
      return slot->index() + scope()->num_parameters();
    default:
      break;
  }
  return kInvalidSlotNumber;
}


MemOperand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
  // Currently, this assertion will fail if we try to assign to
  // a constant variable that is constant because it is read-only
  // (such as the variable referring to a named function expression).
  // We need to implement assignments to read-only variables.
  // Ideally, we should do this during AST generation (by converting
  // such assignments into expression statements); however, in general
  // we may not be able to make the decision until past AST generation,
  // that is when the entire program is known.
  ASSERT(slot != NULL);
  int index = slot->index();
  switch (slot->type()) {
    case Slot::PARAMETER:
      return frame_->ParameterAt(index);

    case Slot::LOCAL:
      return frame_->LocalAt(index);

    case Slot::CONTEXT: {
      // Follow the context chain if necessary.
      ASSERT(!tmp.is(cp));  // do not overwrite context register
      Register context = cp;
      int chain_length = scope()->ContextChainLength(slot->var()->scope());
      for (int i = 0; i < chain_length; i++) {
        // Load the closure.
        // (All contexts, even 'with' contexts, have a closure,
        // and it is the same for all contexts inside a function.
        // There is no need to go to the function context first.)
        __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
        // Load the function context (which is the incoming, outer context).
        __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
        context = tmp;
      }
      // We may have a 'with' context now. Get the function context.
      // (In fact this mov may never be needed, since the scope analysis
      // may not permit a direct context access in this case and thus we are
      // always at a function context. However it is safe to dereference be-
      // cause the function context of a function context is itself. Before
      // deleting this mov we should try to create a counter-example first,
      // though...)
      __ ldr(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
      return ContextOperand(tmp, index);
    }

    default:
      UNREACHABLE();
      return MemOperand(r0, 0);
  }
}


MemOperand CodeGenerator::ContextSlotOperandCheckExtensions(
    Slot* slot,
    Register tmp,
    Register tmp2,
    JumpTarget* slow) {
  ASSERT(slot->type() == Slot::CONTEXT);
  Register context = cp;

  for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX));
        __ tst(tmp2, tmp2);
        slow->Branch(ne);
      }
      __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
      __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
      context = tmp;
    }
  }
  // Check that last extension is NULL.
  __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX));
  __ tst(tmp2, tmp2);
  slow->Branch(ne);
  __ ldr(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
  return ContextOperand(tmp, slot->index());
}


// Loads a value on TOS. If it is a boolean value, the result may have been
// (partially) translated into branches, or it may have set the condition
// code register. If force_cc is set, the value is forced to set the
// condition code register and no value is pushed. If the condition code
// register was set, has_cc() is true and cc_reg_ contains the condition to
// test for 'true'.
void CodeGenerator::LoadCondition(Expression* x,
                                  JumpTarget* true_target,
                                  JumpTarget* false_target,
                                  bool force_cc) {
  ASSERT(!has_cc());
  int original_height = frame_->height();

  { ConditionCodeGenState new_state(this, true_target, false_target);
    Visit(x);

    // If we hit a stack overflow, we may not have actually visited
    // the expression.  In that case, we ensure that we have a
    // valid-looking frame state because we will continue to generate
    // code as we unwind the C++ stack.
    //
    // It's possible to have both a stack overflow and a valid frame
    // state (eg, a subexpression overflowed, visiting it returned
    // with a dummied frame state, and visiting this expression
    // returned with a normal-looking state).
    if (HasStackOverflow() &&
        has_valid_frame() &&
        !has_cc() &&
        frame_->height() == original_height) {
      true_target->Jump();
    }
  }
  if (force_cc && frame_ != NULL && !has_cc()) {
    // Convert the TOS value to a boolean in the condition code register.
    ToBoolean(true_target, false_target);
  }
  ASSERT(!force_cc || !has_valid_frame() || has_cc());
  ASSERT(!has_valid_frame() ||
         (has_cc() && frame_->height() == original_height) ||
         (!has_cc() && frame_->height() == original_height + 1));
}


void CodeGenerator::Load(Expression* expr) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  JumpTarget true_target;
  JumpTarget false_target;
  LoadCondition(expr, &true_target, &false_target, false);

  if (has_cc()) {
    // Convert cc_reg_ into a boolean value.
    JumpTarget loaded;
    JumpTarget materialize_true;
    materialize_true.Branch(cc_reg_);
    frame_->EmitPushRoot(Heap::kFalseValueRootIndex);
    loaded.Jump();
    materialize_true.Bind();
    frame_->EmitPushRoot(Heap::kTrueValueRootIndex);
    loaded.Bind();
    cc_reg_ = al;
  }

  if (true_target.is_linked() || false_target.is_linked()) {
    // We have at least one condition value that has been "translated"
    // into a branch, thus it needs to be loaded explicitly.
    JumpTarget loaded;
    if (frame_ != NULL) {
      loaded.Jump();  // Don't lose the current TOS.
    }
    bool both = true_target.is_linked() && false_target.is_linked();
    // Load "true" if necessary.
    if (true_target.is_linked()) {
      true_target.Bind();
      frame_->EmitPushRoot(Heap::kTrueValueRootIndex);
    }
    // If both "true" and "false" need to be loaded jump across the code for
    // "false".
    if (both) {
      loaded.Jump();
    }
    // Load "false" if necessary.
    if (false_target.is_linked()) {
      false_target.Bind();
      frame_->EmitPushRoot(Heap::kFalseValueRootIndex);
    }
    // A value is loaded on all paths reaching this point.
    loaded.Bind();
  }
  ASSERT(has_valid_frame());
  ASSERT(!has_cc());
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::LoadGlobal() {
  Register reg = frame_->GetTOSRegister();
  __ ldr(reg, GlobalObject());
  frame_->EmitPush(reg);
}


void CodeGenerator::LoadGlobalReceiver(Register scratch) {
  Register reg = frame_->GetTOSRegister();
  __ ldr(reg, ContextOperand(cp, Context::GLOBAL_INDEX));
  __ ldr(reg,
         FieldMemOperand(reg, GlobalObject::kGlobalReceiverOffset));
  frame_->EmitPush(reg);
}


ArgumentsAllocationMode CodeGenerator::ArgumentsMode() {
  if (scope()->arguments() == NULL) return NO_ARGUMENTS_ALLOCATION;
  ASSERT(scope()->arguments_shadow() != NULL);
  // We don't want to do lazy arguments allocation for functions that
  // have heap-allocated contexts, because it interferes with the
  // uninitialized const tracking in the context objects.
  return (scope()->num_heap_slots() > 0)
      ? EAGER_ARGUMENTS_ALLOCATION
      : LAZY_ARGUMENTS_ALLOCATION;
}


void CodeGenerator::StoreArgumentsObject(bool initial) {
  ArgumentsAllocationMode mode = ArgumentsMode();
  ASSERT(mode != NO_ARGUMENTS_ALLOCATION);

  Comment cmnt(masm_, "[ store arguments object");
  if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) {
    // When using lazy arguments allocation, we store the hole value
    // as a sentinel indicating that the arguments object hasn't been
    // allocated yet.
    frame_->EmitPushRoot(Heap::kTheHoleValueRootIndex);
  } else {
    frame_->SpillAll();
    ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
    __ ldr(r2, frame_->Function());
    // The receiver is below the arguments, the return address, and the
    // frame pointer on the stack.
    const int kReceiverDisplacement = 2 + scope()->num_parameters();
    __ add(r1, fp, Operand(kReceiverDisplacement * kPointerSize));
    __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));
    frame_->Adjust(3);
    __ Push(r2, r1, r0);
    frame_->CallStub(&stub, 3);
    frame_->EmitPush(r0);
  }

  Variable* arguments = scope()->arguments()->var();
  Variable* shadow = scope()->arguments_shadow()->var();
  ASSERT(arguments != NULL && arguments->slot() != NULL);
  ASSERT(shadow != NULL && shadow->slot() != NULL);
  JumpTarget done;
  if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) {
    // We have to skip storing into the arguments slot if it has
    // already been written to.  This can happen if a function
    // has a local variable named 'arguments'.
    LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF);
    Register arguments = frame_->PopToRegister();
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(arguments, ip);
    done.Branch(ne);
  }
  StoreToSlot(arguments->slot(), NOT_CONST_INIT);
  if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind();
  StoreToSlot(shadow->slot(), NOT_CONST_INIT);
}


void CodeGenerator::LoadTypeofExpression(Expression* expr) {
  // Special handling of identifiers as subexpressions of typeof.
  Variable* variable = expr->AsVariableProxy()->AsVariable();
  if (variable != NULL && !variable->is_this() && variable->is_global()) {
    // For a global variable we build the property reference
    // <global>.<variable> and perform a (regular non-contextual) property
    // load to make sure we do not get reference errors.
    Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX);
    Literal key(variable->name());
    Property property(&global, &key, RelocInfo::kNoPosition);
    Reference ref(this, &property);
    ref.GetValue();
  } else if (variable != NULL && variable->slot() != NULL) {
    // For a variable that rewrites to a slot, we signal it is the immediate
    // subexpression of a typeof.
    LoadFromSlotCheckForArguments(variable->slot(), INSIDE_TYPEOF);
  } else {
    // Anything else can be handled normally.
    Load(expr);
  }
}


Reference::Reference(CodeGenerator* cgen,
                     Expression* expression,
                     bool persist_after_get)
    : cgen_(cgen),
      expression_(expression),
      type_(ILLEGAL),
      persist_after_get_(persist_after_get) {
  cgen->LoadReference(this);
}


Reference::~Reference() {
  ASSERT(is_unloaded() || is_illegal());
}


void CodeGenerator::LoadReference(Reference* ref) {
  Comment cmnt(masm_, "[ LoadReference");
  Expression* e = ref->expression();
  Property* property = e->AsProperty();
  Variable* var = e->AsVariableProxy()->AsVariable();

  if (property != NULL) {
    // The expression is either a property or a variable proxy that rewrites
    // to a property.
    Load(property->obj());
    if (property->key()->IsPropertyName()) {
      ref->set_type(Reference::NAMED);
    } else {
      Load(property->key());
      ref->set_type(Reference::KEYED);
    }
  } else if (var != NULL) {
    // The expression is a variable proxy that does not rewrite to a
    // property.  Global variables are treated as named property references.
    if (var->is_global()) {
      LoadGlobal();
      ref->set_type(Reference::NAMED);
    } else {
      ASSERT(var->slot() != NULL);
      ref->set_type(Reference::SLOT);
    }
  } else {
    // Anything else is a runtime error.
    Load(e);
    frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
  }
}


void CodeGenerator::UnloadReference(Reference* ref) {
  int size = ref->size();
  ref->set_unloaded();
  if (size == 0) return;

  // Pop a reference from the stack while preserving TOS.
  VirtualFrame::RegisterAllocationScope scope(this);
  Comment cmnt(masm_, "[ UnloadReference");
  if (size > 0) {
    Register tos = frame_->PopToRegister();
    frame_->Drop(size);
    frame_->EmitPush(tos);
  }
}


// ECMA-262, section 9.2, page 30: ToBoolean(). Convert the given
// register to a boolean in the condition code register. The code
// may jump to 'false_target' in case the register converts to 'false'.
void CodeGenerator::ToBoolean(JumpTarget* true_target,
                              JumpTarget* false_target) {
  // Note: The generated code snippet does not change stack variables.
  //       Only the condition code should be set.
  bool known_smi = frame_->KnownSmiAt(0);
  Register tos = frame_->PopToRegister();

  // Fast case checks

  // Check if the value is 'false'.
  if (!known_smi) {
    __ LoadRoot(ip, Heap::kFalseValueRootIndex);
    __ cmp(tos, ip);
    false_target->Branch(eq);

    // Check if the value is 'true'.
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(tos, ip);
    true_target->Branch(eq);

    // Check if the value is 'undefined'.
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(tos, ip);
    false_target->Branch(eq);
  }

  // Check if the value is a smi.
  __ cmp(tos, Operand(Smi::FromInt(0)));

  if (!known_smi) {
    false_target->Branch(eq);
    __ tst(tos, Operand(kSmiTagMask));
    true_target->Branch(eq);

    // Slow case: call the runtime.
    frame_->EmitPush(tos);
    frame_->CallRuntime(Runtime::kToBool, 1);
    // Convert the result (r0) to a condition code.
    __ LoadRoot(ip, Heap::kFalseValueRootIndex);
    __ cmp(r0, ip);
  }

  cc_reg_ = ne;
}


void CodeGenerator::GenericBinaryOperation(Token::Value op,
                                           OverwriteMode overwrite_mode,
                                           GenerateInlineSmi inline_smi,
                                           int constant_rhs) {
  // top of virtual frame: y
  // 2nd elt. on virtual frame : x
  // result : top of virtual frame

  // Stub is entered with a call: 'return address' is in lr.
  switch (op) {
    case Token::ADD:
    case Token::SUB:
      if (inline_smi) {
        JumpTarget done;
        Register rhs = frame_->PopToRegister();
        Register lhs = frame_->PopToRegister(rhs);
        Register scratch = VirtualFrame::scratch0();
        __ orr(scratch, rhs, Operand(lhs));
        // Check they are both small and positive.
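        // (With kSmiTag == 0, as asserted below, and a one-bit smi tag, the
        // OR of the two values has a zero low bit only if both are smis, and
        // has the top two bits clear only if both tagged words are below
        // 0x40000000, in which case the add or sub cannot overflow.)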
        __ tst(scratch, Operand(kSmiTagMask | 0xc0000000));
        ASSERT(rhs.is(r0) || lhs.is(r0));  // r0 is free now.
        ASSERT_EQ(0, kSmiTag);
        if (op == Token::ADD) {
          __ add(r0, lhs, Operand(rhs), LeaveCC, eq);
        } else {
          __ sub(r0, lhs, Operand(rhs), LeaveCC, eq);
        }
        done.Branch(eq);
        GenericBinaryOpStub stub(op, overwrite_mode, lhs, rhs, constant_rhs);
        frame_->SpillAll();
        frame_->CallStub(&stub, 0);
        done.Bind();
        frame_->EmitPush(r0);
        break;
      } else {
        // Fall through!
      }
    case Token::BIT_OR:
    case Token::BIT_AND:
    case Token::BIT_XOR:
      if (inline_smi) {
        bool rhs_is_smi = frame_->KnownSmiAt(0);
        bool lhs_is_smi = frame_->KnownSmiAt(1);
        Register rhs = frame_->PopToRegister();
        Register lhs = frame_->PopToRegister(rhs);
        Register smi_test_reg;
        Condition cond;
        if (!rhs_is_smi || !lhs_is_smi) {
          if (rhs_is_smi) {
            smi_test_reg = lhs;
          } else if (lhs_is_smi) {
            smi_test_reg = rhs;
          } else {
            smi_test_reg = VirtualFrame::scratch0();
            __ orr(smi_test_reg, rhs, Operand(lhs));
          }
          // Check they are both Smis.
          __ tst(smi_test_reg, Operand(kSmiTagMask));
          cond = eq;
        } else {
          cond = al;
        }
        ASSERT(rhs.is(r0) || lhs.is(r0));  // r0 is free now.
        if (op == Token::BIT_OR) {
          __ orr(r0, lhs, Operand(rhs), LeaveCC, cond);
        } else if (op == Token::BIT_AND) {
          __ and_(r0, lhs, Operand(rhs), LeaveCC, cond);
        } else {
          ASSERT(op == Token::BIT_XOR);
          ASSERT_EQ(0, kSmiTag);
          __ eor(r0, lhs, Operand(rhs), LeaveCC, cond);
        }
        if (cond != al) {
          JumpTarget done;
          done.Branch(cond);
          GenericBinaryOpStub stub(op, overwrite_mode, lhs, rhs, constant_rhs);
          frame_->SpillAll();
          frame_->CallStub(&stub, 0);
          done.Bind();
        }
        frame_->EmitPush(r0);
        break;
      } else {
        // Fall through!
      }
    case Token::MUL:
    case Token::DIV:
    case Token::MOD:
    case Token::SHL:
    case Token::SHR:
    case Token::SAR: {
      Register rhs = frame_->PopToRegister();
      Register lhs = frame_->PopToRegister(rhs);  // Don't pop to rhs register.
      GenericBinaryOpStub stub(op, overwrite_mode, lhs, rhs, constant_rhs);
      frame_->SpillAll();
      frame_->CallStub(&stub, 0);
      frame_->EmitPush(r0);
      break;
    }

    case Token::COMMA: {
      Register scratch = frame_->PopToRegister();
      // Simply discard left value.
      frame_->Drop();
      frame_->EmitPush(scratch);
      break;
    }

    default:
      // Other cases should have been handled before this point.
      UNREACHABLE();
      break;
  }
}


class DeferredInlineSmiOperation: public DeferredCode {
 public:
  DeferredInlineSmiOperation(Token::Value op,
                             int value,
                             bool reversed,
                             OverwriteMode overwrite_mode,
                             Register tos)
      : op_(op),
        value_(value),
        reversed_(reversed),
        overwrite_mode_(overwrite_mode),
        tos_register_(tos) {
    set_comment("[ DeferredInlinedSmiOperation");
  }

  virtual void Generate();

 private:
  Token::Value op_;
  int value_;
  bool reversed_;
  OverwriteMode overwrite_mode_;
  Register tos_register_;
};


// On entry the non-constant side of the binary operation is in tos_register_
// and the constant smi side is nowhere. The tos_register_ is not used by the
// virtual frame. On exit the answer is in the tos_register_ and the virtual
// frame is unchanged.
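// Note that the inline code that branches here may already have applied the
// operation optimistically (SmiOperation emits an add or sub that jumps here
// on overflow), which is why the ADD and SUB cases below first undo it.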
void DeferredInlineSmiOperation::Generate() {
  VirtualFrame copied_frame(*frame_state()->frame());
  copied_frame.SpillAll();

  Register lhs = r1;
  Register rhs = r0;
  switch (op_) {
    case Token::ADD: {
      // Revert optimistic add.
      if (reversed_) {
        __ sub(r0, tos_register_, Operand(Smi::FromInt(value_)));
        __ mov(r1, Operand(Smi::FromInt(value_)));
      } else {
        __ sub(r1, tos_register_, Operand(Smi::FromInt(value_)));
        __ mov(r0, Operand(Smi::FromInt(value_)));
      }
      break;
    }

    case Token::SUB: {
      // Revert optimistic sub.
      if (reversed_) {
        __ rsb(r0, tos_register_, Operand(Smi::FromInt(value_)));
        __ mov(r1, Operand(Smi::FromInt(value_)));
      } else {
        __ add(r1, tos_register_, Operand(Smi::FromInt(value_)));
        __ mov(r0, Operand(Smi::FromInt(value_)));
      }
      break;
    }

    // For these operations there is no optimistic operation that needs to be
    // reverted.
    case Token::MUL:
    case Token::MOD:
    case Token::BIT_OR:
    case Token::BIT_XOR:
    case Token::BIT_AND:
    case Token::SHL:
    case Token::SHR:
    case Token::SAR: {
      if (tos_register_.is(r1)) {
        __ mov(r0, Operand(Smi::FromInt(value_)));
      } else {
        ASSERT(tos_register_.is(r0));
        __ mov(r1, Operand(Smi::FromInt(value_)));
      }
      if (reversed_ == tos_register_.is(r1)) {
        lhs = r0;
        rhs = r1;
      }
      break;
    }

    default:
      // Other cases should have been handled before this point.
      UNREACHABLE();
      break;
  }

  GenericBinaryOpStub stub(op_, overwrite_mode_, lhs, rhs, value_);
  __ CallStub(&stub);

  // The generic stub returns its value in r0, but that's not
  // necessarily what we want.  We want whatever the inlined code
  // expected, which is that the answer is in the same register as
  // the operand was.
  __ Move(tos_register_, r0);

  // The tos register was not in use for the virtual frame that we
  // came into this function with, so we can merge back to that frame
  // without trashing it.
  copied_frame.MergeTo(frame_state()->frame());
}


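// Returns true if x has at most two bits set: x & (x - 1) clears the lowest
// set bit, so doing that twice leaves zero exactly when the population count
// is <= 2 (e.g. true for 0x90, false for 0x92).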
static bool PopCountLessThanEqual2(unsigned int x) {
  x &= x - 1;
  return (x & (x - 1)) == 0;
}


// Returns the index of the lowest bit set.
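// (e.g. BitPosition(0x28) == 3, since 0x28 is 101000 binary.)  The loops
// below do not terminate for x == 0, so callers must pass a non-zero value.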
static int BitPosition(unsigned x) {
  int bit_posn = 0;
  while ((x & 0xf) == 0) {
    bit_posn += 4;
    x >>= 4;
  }
  while ((x & 1) == 0) {
    bit_posn++;
    x >>= 1;
  }
  return bit_posn;
}


void CodeGenerator::SmiOperation(Token::Value op,
                                 Handle<Object> value,
                                 bool reversed,
                                 OverwriteMode mode) {
  int int_value = Smi::cast(*value)->value();

  bool both_sides_are_smi = frame_->KnownSmiAt(0);

  bool something_to_inline;
  switch (op) {
    case Token::ADD:
    case Token::SUB:
    case Token::BIT_AND:
    case Token::BIT_OR:
    case Token::BIT_XOR: {
      something_to_inline = true;
      break;
    }
    case Token::SHL: {
      something_to_inline = (both_sides_are_smi || !reversed);
      break;
    }
    case Token::SHR:
    case Token::SAR: {
      if (reversed) {
        something_to_inline = false;
      } else {
        something_to_inline = true;
      }
      break;
    }
    case Token::MOD: {
      if (reversed || int_value < 2 || !IsPowerOf2(int_value)) {
        something_to_inline = false;
      } else {
        something_to_inline = true;
      }
      break;
    }
    case Token::MUL: {
      if (!IsEasyToMultiplyBy(int_value)) {
        something_to_inline = false;
      } else {
        something_to_inline = true;
      }
      break;
    }
    default: {
      something_to_inline = false;
      break;
    }
  }

  if (!something_to_inline) {
    if (!reversed) {
      // Push the rhs onto the virtual frame by putting it in a TOS register.
      Register rhs = frame_->GetTOSRegister();
      __ mov(rhs, Operand(value));
      frame_->EmitPush(rhs, TypeInfo::Smi());
      GenericBinaryOperation(op, mode, GENERATE_INLINE_SMI, int_value);
    } else {
      // Pop the rhs, then push lhs and rhs in the right order.  Only performs
      // at most one pop, the rest takes place in TOS registers.
      Register lhs = frame_->GetTOSRegister();  // Get reg for pushing.
      Register rhs = frame_->PopToRegister(lhs);  // Don't use lhs for this.
      __ mov(lhs, Operand(value));
      frame_->EmitPush(lhs, TypeInfo::Smi());
      TypeInfo t = both_sides_are_smi ? TypeInfo::Smi() : TypeInfo::Unknown();
      frame_->EmitPush(rhs, t);
      GenericBinaryOperation(op, mode, GENERATE_INLINE_SMI, kUnknownIntValue);
    }
    return;
  }

  // We move the top of stack to a register (normally no move is involved).
  Register tos = frame_->PopToRegister();
  switch (op) {
    case Token::ADD: {
      DeferredCode* deferred =
          new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);

      __ add(tos, tos, Operand(value), SetCC);
      deferred->Branch(vs);
      if (!both_sides_are_smi) {
        __ tst(tos, Operand(kSmiTagMask));
        deferred->Branch(ne);
      }
      deferred->BindExit();
      frame_->EmitPush(tos);
      break;
    }

    case Token::SUB: {
      DeferredCode* deferred =
          new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);

      if (reversed) {
        __ rsb(tos, tos, Operand(value), SetCC);
      } else {
        __ sub(tos, tos, Operand(value), SetCC);
      }
      deferred->Branch(vs);
      if (!both_sides_are_smi) {
        __ tst(tos, Operand(kSmiTagMask));
        deferred->Branch(ne);
      }
      deferred->BindExit();
      frame_->EmitPush(tos);
      break;
    }


    case Token::BIT_OR:
    case Token::BIT_XOR:
    case Token::BIT_AND: {
      if (both_sides_are_smi) {
        switch (op) {
          case Token::BIT_OR:  __ orr(tos, tos, Operand(value)); break;
          case Token::BIT_XOR: __ eor(tos, tos, Operand(value)); break;
          case Token::BIT_AND: __ And(tos, tos, Operand(value)); break;
          default: UNREACHABLE();
        }
        frame_->EmitPush(tos, TypeInfo::Smi());
      } else {
        DeferredCode* deferred =
            new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
        __ tst(tos, Operand(kSmiTagMask));
        deferred->Branch(ne);
        switch (op) {
          case Token::BIT_OR:  __ orr(tos, tos, Operand(value)); break;
          case Token::BIT_XOR: __ eor(tos, tos, Operand(value)); break;
          case Token::BIT_AND: __ And(tos, tos, Operand(value)); break;
          default: UNREACHABLE();
        }
        deferred->BindExit();
        TypeInfo result_type =
            (op == Token::BIT_AND) ? TypeInfo::Smi() : TypeInfo::Integer32();
        frame_->EmitPush(tos, result_type);
      }
      break;
    }

    case Token::SHL:
      if (reversed) {
        ASSERT(both_sides_are_smi);
        int max_shift = 0;
        int max_result = int_value == 0 ? 1 : int_value;
        while (Smi::IsValid(max_result << 1)) {
          max_shift++;
          max_result <<= 1;
        }
        DeferredCode* deferred =
            new DeferredInlineSmiOperation(op, int_value, true, mode, tos);
        // Mask off the last 5 bits of the shift operand (rhs).  This is part
        // of the definition of shift in JS and we know we have a Smi so we
        // can safely do this.  The masked version gets passed to the
        // deferred code, but that makes no difference.
        __ and_(tos, tos, Operand(Smi::FromInt(0x1f)));
        __ cmp(tos, Operand(Smi::FromInt(max_shift)));
        deferred->Branch(ge);
        Register scratch = VirtualFrame::scratch0();
        __ mov(scratch, Operand(tos, ASR, kSmiTagSize));  // Untag.
        __ mov(tos, Operand(Smi::FromInt(int_value)));    // Load constant.
        __ mov(tos, Operand(tos, LSL, scratch));          // Shift constant.
        deferred->BindExit();
        TypeInfo result = TypeInfo::Integer32();
        frame_->EmitPush(tos, result);
        break;
      }
      // Fall through!
    case Token::SHR:
    case Token::SAR: {
      ASSERT(!reversed);
      TypeInfo result = TypeInfo::Integer32();
      Register scratch = VirtualFrame::scratch0();
      Register scratch2 = VirtualFrame::scratch1();
      int shift_value = int_value & 0x1f;  // least significant 5 bits
      DeferredCode* deferred =
          new DeferredInlineSmiOperation(op, shift_value, false, mode, tos);
      uint32_t problematic_mask = kSmiTagMask;
      // For unsigned shift by zero all negative smis are problematic.
      bool skip_smi_test = both_sides_are_smi;
      if (shift_value == 0 && op == Token::SHR) {
        problematic_mask |= 0x80000000;
        skip_smi_test = false;
      }
      if (!skip_smi_test) {
        __ tst(tos, Operand(problematic_mask));
        deferred->Branch(ne);  // Go slow for problematic input.
      }
      switch (op) {
        case Token::SHL: {
          if (shift_value != 0) {
            int adjusted_shift = shift_value - kSmiTagSize;
            ASSERT(adjusted_shift >= 0);
            if (adjusted_shift != 0) {
              __ mov(scratch, Operand(tos, LSL, adjusted_shift));
              // Check that the *signed* result fits in a smi.
              __ add(scratch2, scratch, Operand(0x40000000), SetCC);
              deferred->Branch(mi);
              __ mov(tos, Operand(scratch, LSL, kSmiTagSize));
            } else {
              // Check that the *signed* result fits in a smi.
              __ add(scratch2, tos, Operand(0x40000000), SetCC);
              deferred->Branch(mi);
              __ mov(tos, Operand(tos, LSL, kSmiTagSize));
            }
          }
          break;
        }
        case Token::SHR: {
          if (shift_value != 0) {
            __ mov(scratch, Operand(tos, ASR, kSmiTagSize));  // Remove tag.
            // LSR by immediate 0 means shifting 32 bits.
            __ mov(scratch, Operand(scratch, LSR, shift_value));
            if (shift_value == 1) {
              // Check that the *unsigned* result fits in a smi.  Neither of
              // the two high-order bits can be set:
              // - 0x80000000: the high bit would be lost when smi tagging.
              // - 0x40000000: this number would convert to negative when smi
              //   tagging.
              // These two cases can only happen with shifts by 0 or 1 when
              // handed a valid smi.
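              // (e.g. the smi -2, tagged 0xfffffffc, untags to 0xfffffffe;
              // LSR #1 gives 0x7fffffff, which has bit 30 set, so we go to
              // the deferred code: the JS result 2^31 - 1 is not a smi.)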
              __ tst(scratch, Operand(0xc0000000));
              deferred->Branch(ne);
            } else {
              ASSERT(shift_value >= 2);
              result = TypeInfo::Smi();  // SHR by at least 2 gives a Smi.
            }
            __ mov(tos, Operand(scratch, LSL, kSmiTagSize));
          }
          break;
        }
        case Token::SAR: {
          // In the ARM instruction set, ASR by immediate 0 means shifting 32
          // bits.
          if (shift_value != 0) {
            // Do the shift and the tag removal in one operation.  If the shift
            // is 31 bits (the highest possible value) then we emit the
            // instruction as a shift by 0 which means shift arithmetically by
            // 32.
            __ mov(tos, Operand(tos, ASR, (kSmiTagSize + shift_value) & 0x1f));
            // Put tag back.
            __ mov(tos, Operand(tos, LSL, kSmiTagSize));
            // SAR by at least 1 gives a Smi.
            result = TypeInfo::Smi();
          }
          break;
        }
        default: UNREACHABLE();
      }
      deferred->BindExit();
      frame_->EmitPush(tos, result);
      break;
    }

    case Token::MOD: {
      ASSERT(!reversed);
      ASSERT(int_value >= 2);
      ASSERT(IsPowerOf2(int_value));
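      // A positive smi modulo a power-of-2 constant is a bit mask on the
      // tagged value: e.g. for int_value == 8 the mask below is 15, and the
      // smi 13 (tagged 26) ANDs down to 10, the tagged form of 13 % 8 == 5.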
      DeferredCode* deferred =
          new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
      unsigned mask = (0x80000000u | kSmiTagMask);
      __ tst(tos, Operand(mask));
      deferred->Branch(ne);  // Go to deferred code on non-Smis and negative.
      mask = (int_value << kSmiTagSize) - 1;
      __ and_(tos, tos, Operand(mask));
      deferred->BindExit();
      // Mod of positive power of 2 Smi gives a Smi if the lhs is an integer.
      frame_->EmitPush(
          tos,
          both_sides_are_smi ? TypeInfo::Smi() : TypeInfo::Number());
      break;
    }

    case Token::MUL: {
      ASSERT(IsEasyToMultiplyBy(int_value));
      DeferredCode* deferred =
          new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
      unsigned max_smi_that_wont_overflow = Smi::kMaxValue / int_value;
      max_smi_that_wont_overflow <<= kSmiTagSize;
      unsigned mask = 0x80000000u;
      while ((mask & max_smi_that_wont_overflow) == 0) {
        mask |= mask >> 1;
      }
      mask |= kSmiTagMask;
      // This does a single mask that checks for a too high value in a
      // conservative way and for a non-Smi.  It also filters out negative
      // numbers, unfortunately, but since this code is inline we prefer
      // brevity to comprehensiveness.
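      // (e.g. for int_value == 3 the loop above builds the mask 0xe0000001:
      // the tst rejects non-smis and any operand of 2^28 or more untagged,
      // which is conservative since 3 * (2^28 - 1) still fits in a smi.)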
Steve Block6ded16b2010-05-10 14:33:55 +01001330 __ tst(tos, Operand(mask));
Steve Blocka7e24c12009-10-30 11:49:00 +00001331 deferred->Branch(ne);
Steve Block6ded16b2010-05-10 14:33:55 +01001332 MultiplyByKnownInt(masm_, tos, tos, int_value);
Steve Blocka7e24c12009-10-30 11:49:00 +00001333 deferred->BindExit();
Steve Block6ded16b2010-05-10 14:33:55 +01001334 frame_->EmitPush(tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00001335 break;
1336 }
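    // A worked example of the conservative overflow mask (assuming
    // Smi::kMaxValue == 0x3fffffff): for int_value == 3,
    // max_smi_that_wont_overflow == (0x3fffffff / 3) << 1 == 0x2aaaaaaa.
    // The loop grows the mask from 0x80000000 to 0xe0000000, the smallest
    // left-aligned run of ones that overlaps that value, and OR-ing in
    // kSmiTagMask gives 0xe0000001. Any word passing the tst is an even
    // value below 0x20000000, i.e. an untagged operand below 2^28, which
    // cannot overflow when multiplied by 3.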
1337
1338 default:
Steve Block6ded16b2010-05-10 14:33:55 +01001339 UNREACHABLE();
Steve Blocka7e24c12009-10-30 11:49:00 +00001340 break;
1341 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001342}
1343
1344
1345void CodeGenerator::Comparison(Condition cc,
1346 Expression* left,
1347 Expression* right,
1348 bool strict) {
Steve Block6ded16b2010-05-10 14:33:55 +01001349 VirtualFrame::RegisterAllocationScope scope(this);
Steve Blocka7e24c12009-10-30 11:49:00 +00001350
Steve Block6ded16b2010-05-10 14:33:55 +01001351 if (left != NULL) Load(left);
1352 if (right != NULL) Load(right);
1353
Steve Blocka7e24c12009-10-30 11:49:00 +00001354 // sp[0] : y
1355 // sp[1] : x
1356 // result : cc register
1357
1358 // Strict only makes sense for equality comparisons.
1359 ASSERT(!strict || cc == eq);
1360
Steve Block6ded16b2010-05-10 14:33:55 +01001361 Register lhs;
1362 Register rhs;
1363
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001364 bool lhs_is_smi;
1365 bool rhs_is_smi;
1366
Steve Block6ded16b2010-05-10 14:33:55 +01001367 // We load the top two stack positions into registers chosen by the virtual
1368 // frame. This should keep the register shuffling to a minimum.
Steve Blocka7e24c12009-10-30 11:49:00 +00001369 // Implement '>' and '<=' by reversal to obtain ECMA-262 conversion order.
1370 if (cc == gt || cc == le) {
1371 cc = ReverseCondition(cc);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001372 lhs_is_smi = frame_->KnownSmiAt(0);
1373 rhs_is_smi = frame_->KnownSmiAt(1);
Steve Block6ded16b2010-05-10 14:33:55 +01001374 lhs = frame_->PopToRegister();
1375 rhs = frame_->PopToRegister(lhs); // Don't pop to the same register again!
Steve Blocka7e24c12009-10-30 11:49:00 +00001376 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001377 rhs_is_smi = frame_->KnownSmiAt(0);
1378 lhs_is_smi = frame_->KnownSmiAt(1);
Steve Block6ded16b2010-05-10 14:33:55 +01001379 rhs = frame_->PopToRegister();
1380 lhs = frame_->PopToRegister(rhs); // Don't pop to the same register again!
Steve Blocka7e24c12009-10-30 11:49:00 +00001381 }
Steve Block6ded16b2010-05-10 14:33:55 +01001382
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001383 bool both_sides_are_smi = (lhs_is_smi && rhs_is_smi);
1384
Steve Block6ded16b2010-05-10 14:33:55 +01001385 ASSERT(rhs.is(r0) || rhs.is(r1));
1386 ASSERT(lhs.is(r0) || lhs.is(r1));
1387
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001388 JumpTarget exit;
Steve Blocka7e24c12009-10-30 11:49:00 +00001389
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001390 if (!both_sides_are_smi) {
1391 // Now we have the two sides in r0 and r1. We flush any other registers
1392 // because the stub doesn't know about register allocation.
1393 frame_->SpillAll();
1394 Register scratch = VirtualFrame::scratch0();
1395 Register smi_test_reg;
1396 if (lhs_is_smi) {
1397 smi_test_reg = rhs;
1398 } else if (rhs_is_smi) {
1399 smi_test_reg = lhs;
1400 } else {
1401 __ orr(scratch, lhs, Operand(rhs));
1402 smi_test_reg = scratch;
1403 }
1404 __ tst(smi_test_reg, Operand(kSmiTagMask));
1405 JumpTarget smi;
1406 smi.Branch(eq);
1407
1408 // Perform non-smi comparison by stub.
1409 // CompareStub takes arguments in r0 and r1, returns <0, >0 or 0 in r0.
1410 // We call with 0 args because there are 0 on the stack.
Ben Murdoch3bec4d22010-07-22 14:51:16 +01001411 CompareStub stub(cc, strict, kBothCouldBeNaN, true, lhs, rhs);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001412 frame_->CallStub(&stub, 0);
1413 __ cmp(r0, Operand(0));
1414 exit.Jump();
1415
1416 smi.Bind();
Steve Block6ded16b2010-05-10 14:33:55 +01001417 }
1418
Steve Blocka7e24c12009-10-30 11:49:00 +00001419 // Do smi comparisons by pointer comparison.
Steve Block6ded16b2010-05-10 14:33:55 +01001420 __ cmp(lhs, Operand(rhs));
Steve Blocka7e24c12009-10-30 11:49:00 +00001421
1422 exit.Bind();
1423 cc_reg_ = cc;
1424}
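// The joint Smi check in Comparison relies on the Smi tag bit being 0:
// the bitwise OR of two words has a clear tag bit exactly when both
// inputs do, so a single tst covers both operands. A minimal sketch of
// the same test in plain C++ (illustrative only, not compiled into V8):
#if 0
static inline bool AreBothSmi(intptr_t lhs, intptr_t rhs) {
  const intptr_t kTagMask = 1;  // Smis carry a 0 in the low (tag) bit.
  return ((lhs | rhs) & kTagMask) == 0;
}
#endif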
1425
1426
Steve Blocka7e24c12009-10-30 11:49:00 +00001427// Call the function on the stack with the given arguments.
1428void CodeGenerator::CallWithArguments(ZoneList<Expression*>* args,
Leon Clarkee46be812010-01-19 14:06:41 +00001429 CallFunctionFlags flags,
1430 int position) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001431 // Push the arguments ("left-to-right") on the stack.
1432 int arg_count = args->length();
1433 for (int i = 0; i < arg_count; i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001434 Load(args->at(i));
Steve Blocka7e24c12009-10-30 11:49:00 +00001435 }
1436
1437 // Record the position for debugging purposes.
1438 CodeForSourcePosition(position);
1439
1440 // Use the shared code stub to call the function.
1441 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Leon Clarkee46be812010-01-19 14:06:41 +00001442 CallFunctionStub call_function(arg_count, in_loop, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00001443 frame_->CallStub(&call_function, arg_count + 1);
1444
1445 // Restore context and pop function from the stack.
1446 __ ldr(cp, frame_->Context());
1447 frame_->Drop(); // discard the TOS
1448}
1449
1450
Steve Block6ded16b2010-05-10 14:33:55 +01001451void CodeGenerator::CallApplyLazy(Expression* applicand,
1452 Expression* receiver,
1453 VariableProxy* arguments,
1454 int position) {
1455 // An optimized implementation of expressions of the form
1456 // x.apply(y, arguments).
1457 // If the arguments object of the scope has not been allocated,
1458 // and x.apply is Function.prototype.apply, this optimization
1459 // just copies y and the arguments of the current function on the
1460 // stack, as receiver and arguments, and calls x.
1461 // In the implementation comments, we call x the applicand
1462 // and y the receiver.
Steve Block6ded16b2010-05-10 14:33:55 +01001463
1464 ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
1465 ASSERT(arguments->IsArguments());
1466
1467 // Load applicand.apply onto the stack. This will usually
1468 // give us a megamorphic load site. Not super, but it works.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001469 Load(applicand);
Steve Block6ded16b2010-05-10 14:33:55 +01001470 Handle<String> name = Factory::LookupAsciiSymbol("apply");
Leon Clarkef7060e22010-06-03 12:02:55 +01001471 frame_->Dup();
Steve Block6ded16b2010-05-10 14:33:55 +01001472 frame_->CallLoadIC(name, RelocInfo::CODE_TARGET);
1473 frame_->EmitPush(r0);
1474
1475 // Load the receiver and the existing arguments object onto the
1476 // expression stack. Avoid allocating the arguments object here.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001477 Load(receiver);
Steve Block6ded16b2010-05-10 14:33:55 +01001478 LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF);
1479
Steve Block8defd9f2010-07-08 12:39:36 +01001480 // At this point the top two stack elements are probably in registers
1481 // since they were just loaded. Ensure they are in registers and
1482 // get hold of those registers.
1483 Register receiver_reg = frame_->Peek2();
1484 Register arguments_reg = frame_->Peek();
1485
1486 // From now on the frame is spilled.
1487 frame_->SpillAll();
1488
Steve Block6ded16b2010-05-10 14:33:55 +01001489 // Emit the source position information after having loaded the
1490 // receiver and the arguments.
1491 CodeForSourcePosition(position);
1492 // Contents of the stack at this point:
1493 // sp[0]: arguments object of the current function or the hole.
1494 // sp[1]: receiver
1495 // sp[2]: applicand.apply
1496 // sp[3]: applicand.
1497
1498 // Check if the arguments object has been lazily allocated
1499 // already. If so, just use that instead of copying the arguments
1500 // from the stack. This also deals with cases where a local variable
1501 // named 'arguments' has been introduced.
Steve Block8defd9f2010-07-08 12:39:36 +01001502 JumpTarget slow;
1503 Label done;
Steve Block6ded16b2010-05-10 14:33:55 +01001504 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
Steve Block8defd9f2010-07-08 12:39:36 +01001505 __ cmp(ip, arguments_reg);
1506 slow.Branch(ne);
Steve Block6ded16b2010-05-10 14:33:55 +01001507
1508 Label build_args;
1509 // Get rid of the arguments object probe.
1510 frame_->Drop();
1511 // Stack now has 3 elements on it.
1512 // Contents of stack at this point:
Steve Block8defd9f2010-07-08 12:39:36 +01001513 // sp[0]: receiver - in the receiver_reg register.
Steve Block6ded16b2010-05-10 14:33:55 +01001514 // sp[1]: applicand.apply
1515 // sp[2]: applicand.
1516
1517 // Check that the receiver really is a JavaScript object.
Steve Block8defd9f2010-07-08 12:39:36 +01001518 __ BranchOnSmi(receiver_reg, &build_args);
Steve Block6ded16b2010-05-10 14:33:55 +01001519 // We allow all JSObjects including JSFunctions. As long as
1520 // JS_FUNCTION_TYPE is the last instance type and it is right
1521 // after LAST_JS_OBJECT_TYPE, we do not have to check the upper
1522 // bound.
1523 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
1524 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
Steve Block8defd9f2010-07-08 12:39:36 +01001525 __ CompareObjectType(receiver_reg, r2, r3, FIRST_JS_OBJECT_TYPE);
Steve Block6ded16b2010-05-10 14:33:55 +01001526 __ b(lt, &build_args);
1527
1528 // Check that applicand.apply is Function.prototype.apply.
1529 __ ldr(r0, MemOperand(sp, kPointerSize));
1530 __ BranchOnSmi(r0, &build_args);
1531 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
1532 __ b(ne, &build_args);
1533 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
1534 Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
1535 __ ldr(r1, FieldMemOperand(r0, SharedFunctionInfo::kCodeOffset));
1536 __ cmp(r1, Operand(apply_code));
1537 __ b(ne, &build_args);
1538
1539 // Check that applicand is a function.
1540 __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
1541 __ BranchOnSmi(r1, &build_args);
1542 __ CompareObjectType(r1, r2, r3, JS_FUNCTION_TYPE);
1543 __ b(ne, &build_args);
1544
1545 // Copy the arguments to this function possibly from the
1546 // adaptor frame below it.
1547 Label invoke, adapted;
1548 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1549 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
1550 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1551 __ b(eq, &adapted);
1552
1553 // No arguments adaptor frame. Copy fixed number of arguments.
1554 __ mov(r0, Operand(scope()->num_parameters()));
1555 for (int i = 0; i < scope()->num_parameters(); i++) {
1556 __ ldr(r2, frame_->ParameterAt(i));
1557 __ push(r2);
1558 }
1559 __ jmp(&invoke);
1560
1561 // Arguments adaptor frame present. Copy arguments from there, but
1562 // avoid copying too many arguments to avoid stack overflows.
1563 __ bind(&adapted);
1564 static const uint32_t kArgumentsLimit = 1 * KB;
1565 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
1566 __ mov(r0, Operand(r0, LSR, kSmiTagSize));
1567 __ mov(r3, r0);
1568 __ cmp(r0, Operand(kArgumentsLimit));
1569 __ b(gt, &build_args);
1570
1571 // Loop through the arguments pushing them onto the execution
1572 // stack. We don't inform the virtual frame of the push, so we don't
1573 // have to worry about getting rid of the elements from the virtual
1574 // frame.
1575 Label loop;
1576 // r3 is a small non-negative integer, due to the test above.
1577 __ cmp(r3, Operand(0));
1578 __ b(eq, &invoke);
1579 // Compute the address of the first argument.
1580 __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2));
1581 __ add(r2, r2, Operand(kPointerSize));
1582 __ bind(&loop);
1583 // Post-decrement argument address by kPointerSize on each iteration.
1584 __ ldr(r4, MemOperand(r2, kPointerSize, NegPostIndex));
1585 __ push(r4);
1586 __ sub(r3, r3, Operand(1), SetCC);
1587 __ b(gt, &loop);
1588
1589 // Invoke the function.
1590 __ bind(&invoke);
1591 ParameterCount actual(r0);
1592 __ InvokeFunction(r1, actual, CALL_FUNCTION);
1593 // Drop applicand.apply and applicand from the stack, and push
1594 // the result of the function call, but leave the spilled frame
1595 // unchanged, with 3 elements, so it is correct when we compile the
1596 // slow-case code.
1597 __ add(sp, sp, Operand(2 * kPointerSize));
1598 __ push(r0);
1599 // Stack now has 1 element:
1600 // sp[0]: result
1601 __ jmp(&done);
1602
1603 // Slow-case: Allocate the arguments object since we know it isn't
1604 // there, and fall through to the slow case where we call
1605 // applicand.apply.
1606 __ bind(&build_args);
1607 // Stack now has 3 elements, because we jumped here from a point where:
1608 // sp[0]: receiver
1609 // sp[1]: applicand.apply
1610 // sp[2]: applicand.
1611 StoreArgumentsObject(false);
1612
1613 // Stack and frame now have 4 elements.
Steve Block8defd9f2010-07-08 12:39:36 +01001614 slow.Bind();
Steve Block6ded16b2010-05-10 14:33:55 +01001615
1616 // Generic computation of x.apply(y, args) with no special optimization.
1617 // Flip applicand.apply and applicand on the stack, so
1618 // applicand looks like the receiver of the applicand.apply call.
1619 // Then process it as a normal function call.
1620 __ ldr(r0, MemOperand(sp, 3 * kPointerSize));
1621 __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
Leon Clarkef7060e22010-06-03 12:02:55 +01001622 __ Strd(r0, r1, MemOperand(sp, 2 * kPointerSize));
Steve Block6ded16b2010-05-10 14:33:55 +01001623
1624 CallFunctionStub call_function(2, NOT_IN_LOOP, NO_CALL_FUNCTION_FLAGS);
1625 frame_->CallStub(&call_function, 3);
1626 // The function and its two arguments have been dropped.
1627 frame_->Drop(); // Drop the receiver as well.
1628 frame_->EmitPush(r0);
1629 // Stack now has 1 element:
1630 // sp[0]: result
1631 __ bind(&done);
1632
1633 // Restore the context register after a call.
1634 __ ldr(cp, frame_->Context());
1635}
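// The adapted-arguments copy in CallApplyLazy above is a downward block
// copy capped at kArgumentsLimit words. A plain C++ reading aid for the
// NegPostIndex loop (illustrative only, not compiled into V8; 'first_arg'
// mirrors the address computed into r2 and 'count' the counter in r3):
#if 0
static void CopyArgumentsDownward(intptr_t* first_arg, int count,
                                  void (*push)(intptr_t)) {
  intptr_t* p = first_arg;
  for (int i = count; i > 0; i--) {
    push(*p--);  // Load, then post-decrement to the next lower word.
  }
}
#endif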
1636
1637
Steve Blocka7e24c12009-10-30 11:49:00 +00001638void CodeGenerator::Branch(bool if_true, JumpTarget* target) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001639 ASSERT(has_cc());
1640 Condition cc = if_true ? cc_reg_ : NegateCondition(cc_reg_);
1641 target->Branch(cc);
1642 cc_reg_ = al;
1643}
1644
1645
1646void CodeGenerator::CheckStack() {
Steve Block8defd9f2010-07-08 12:39:36 +01001647 frame_->SpillAll();
Steve Blockd0582a62009-12-15 09:54:21 +00001648 Comment cmnt(masm_, "[ check stack");
1649 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
1650 // Set up lr with the return address. kInstrSize is added to the
1651 // implicit 8 byte offset that always applies to operations with pc and
1652 // gives a return address 12 bytes down.
1653 masm_->add(lr, pc, Operand(Assembler::kInstrSize));
1654 masm_->cmp(sp, Operand(ip));
1655 StackCheckStub stub;
1656 // Call the stub if lower.
1657 masm_->mov(pc,
1658 Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
1659 RelocInfo::CODE_TARGET),
1660 LeaveCC,
1661 lo);
Steve Blocka7e24c12009-10-30 11:49:00 +00001662}
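// A worked example of the address arithmetic above: reading pc on ARM
// yields the address of the current instruction plus 8. With the add at
// address A, lr becomes A + 8 + kInstrSize == A + 12, while the cmp sits
// at A + 4 and the conditional mov to pc at A + 8 -- so the stub, if
// called, returns to A + 12, the first instruction after this sequence.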
1663
1664
1665void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
1666#ifdef DEBUG
1667 int original_height = frame_->height();
1668#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00001669 for (int i = 0; frame_ != NULL && i < statements->length(); i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001670 Visit(statements->at(i));
Steve Blocka7e24c12009-10-30 11:49:00 +00001671 }
1672 ASSERT(!has_valid_frame() || frame_->height() == original_height);
1673}
1674
1675
1676void CodeGenerator::VisitBlock(Block* node) {
1677#ifdef DEBUG
1678 int original_height = frame_->height();
1679#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00001680 Comment cmnt(masm_, "[ Block");
1681 CodeForStatementPosition(node);
Kristian Monsen25f61362010-05-21 11:50:48 +01001682 node->break_target()->SetExpectedHeight();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001683 VisitStatements(node->statements());
Steve Blocka7e24c12009-10-30 11:49:00 +00001684 if (node->break_target()->is_linked()) {
1685 node->break_target()->Bind();
1686 }
1687 node->break_target()->Unuse();
1688 ASSERT(!has_valid_frame() || frame_->height() == original_height);
1689}
1690
1691
1692void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
Steve Block3ce2e202009-11-05 08:53:23 +00001693 frame_->EmitPush(cp);
Steve Block6ded16b2010-05-10 14:33:55 +01001694 frame_->EmitPush(Operand(pairs));
1695 frame_->EmitPush(Operand(Smi::FromInt(is_eval() ? 1 : 0)));
1696
Steve Blocka7e24c12009-10-30 11:49:00 +00001697 frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
1698 // The result is discarded.
1699}
1700
1701
1702void CodeGenerator::VisitDeclaration(Declaration* node) {
1703#ifdef DEBUG
1704 int original_height = frame_->height();
1705#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00001706 Comment cmnt(masm_, "[ Declaration");
1707 Variable* var = node->proxy()->var();
1708 ASSERT(var != NULL); // must have been resolved
1709 Slot* slot = var->slot();
1710
1711 // If it was not possible to allocate the variable at compile time,
1712 // we need to "declare" it at runtime to make sure it actually
1713 // exists in the local context.
1714 if (slot != NULL && slot->type() == Slot::LOOKUP) {
1715 // Variables with a "LOOKUP" slot were introduced as non-locals
1716 // during variable resolution and must have mode DYNAMIC.
1717 ASSERT(var->is_dynamic());
1718 // For now, just do a runtime call.
1719 frame_->EmitPush(cp);
Steve Block6ded16b2010-05-10 14:33:55 +01001720 frame_->EmitPush(Operand(var->name()));
Steve Blocka7e24c12009-10-30 11:49:00 +00001721 // Declaration nodes are always declared in only two modes.
1722 ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST);
1723 PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY;
Steve Block6ded16b2010-05-10 14:33:55 +01001724 frame_->EmitPush(Operand(Smi::FromInt(attr)));
Steve Blocka7e24c12009-10-30 11:49:00 +00001725 // Push initial value, if any.
1726 // Note: For variables we must not push an initial value (such as
1727 // 'undefined') because we may have a (legal) redeclaration and we
1728 // must not destroy the current value.
1729 if (node->mode() == Variable::CONST) {
Steve Block6ded16b2010-05-10 14:33:55 +01001730 frame_->EmitPushRoot(Heap::kTheHoleValueRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +00001731 } else if (node->fun() != NULL) {
Steve Block6ded16b2010-05-10 14:33:55 +01001732 Load(node->fun());
Steve Blocka7e24c12009-10-30 11:49:00 +00001733 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01001734 frame_->EmitPush(Operand(0));
Steve Blocka7e24c12009-10-30 11:49:00 +00001735 }
Steve Block6ded16b2010-05-10 14:33:55 +01001736
Steve Blocka7e24c12009-10-30 11:49:00 +00001737 frame_->CallRuntime(Runtime::kDeclareContextSlot, 4);
1738 // Ignore the return value (declarations are statements).
Steve Block6ded16b2010-05-10 14:33:55 +01001739
Steve Blocka7e24c12009-10-30 11:49:00 +00001740 ASSERT(frame_->height() == original_height);
1741 return;
1742 }
1743
1744 ASSERT(!var->is_global());
1745
1746 // If we have a function or a constant, we need to initialize the variable.
1747 Expression* val = NULL;
1748 if (node->mode() == Variable::CONST) {
1749 val = new Literal(Factory::the_hole_value());
1750 } else {
1751 val = node->fun(); // NULL if we don't have a function
1752 }
1753
Steve Block8defd9f2010-07-08 12:39:36 +01001754
Steve Blocka7e24c12009-10-30 11:49:00 +00001755 if (val != NULL) {
Steve Block8defd9f2010-07-08 12:39:36 +01001756 WriteBarrierCharacter wb_info =
1757 val->type()->IsLikelySmi() ? LIKELY_SMI : UNLIKELY_SMI;
1758 if (val->AsLiteral() != NULL) wb_info = NEVER_NEWSPACE;
Steve Block6ded16b2010-05-10 14:33:55 +01001759 // Set initial value.
1760 Reference target(this, node->proxy());
1761 Load(val);
Steve Block8defd9f2010-07-08 12:39:36 +01001762 target.SetValue(NOT_CONST_INIT, wb_info);
Steve Block6ded16b2010-05-10 14:33:55 +01001763
Steve Blocka7e24c12009-10-30 11:49:00 +00001764 // Get rid of the assigned value (declarations are statements).
1765 frame_->Drop();
1766 }
1767 ASSERT(frame_->height() == original_height);
1768}
1769
1770
1771void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) {
1772#ifdef DEBUG
1773 int original_height = frame_->height();
1774#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00001775 Comment cmnt(masm_, "[ ExpressionStatement");
1776 CodeForStatementPosition(node);
1777 Expression* expression = node->expression();
1778 expression->MarkAsStatement();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001779 Load(expression);
Steve Blocka7e24c12009-10-30 11:49:00 +00001780 frame_->Drop();
1781 ASSERT(frame_->height() == original_height);
1782}
1783
1784
1785void CodeGenerator::VisitEmptyStatement(EmptyStatement* node) {
1786#ifdef DEBUG
1787 int original_height = frame_->height();
1788#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00001789 Comment cmnt(masm_, "// EmptyStatement");
1790 CodeForStatementPosition(node);
1791 // nothing to do
1792 ASSERT(frame_->height() == original_height);
1793}
1794
1795
1796void CodeGenerator::VisitIfStatement(IfStatement* node) {
1797#ifdef DEBUG
1798 int original_height = frame_->height();
1799#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00001800 Comment cmnt(masm_, "[ IfStatement");
1801 // Generate different code depending on which parts of the if statement
1802 // are present or not.
1803 bool has_then_stm = node->HasThenStatement();
1804 bool has_else_stm = node->HasElseStatement();
1805
1806 CodeForStatementPosition(node);
1807
1808 JumpTarget exit;
1809 if (has_then_stm && has_else_stm) {
1810 Comment cmnt(masm_, "[ IfThenElse");
1811 JumpTarget then;
1812 JumpTarget else_;
1813 // if (cond)
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001814 LoadCondition(node->condition(), &then, &else_, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00001815 if (frame_ != NULL) {
1816 Branch(false, &else_);
1817 }
1818 // then
1819 if (frame_ != NULL || then.is_linked()) {
1820 then.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001821 Visit(node->then_statement());
Steve Blocka7e24c12009-10-30 11:49:00 +00001822 }
1823 if (frame_ != NULL) {
1824 exit.Jump();
1825 }
1826 // else
1827 if (else_.is_linked()) {
1828 else_.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001829 Visit(node->else_statement());
Steve Blocka7e24c12009-10-30 11:49:00 +00001830 }
1831
1832 } else if (has_then_stm) {
1833 Comment cmnt(masm_, "[ IfThen");
1834 ASSERT(!has_else_stm);
1835 JumpTarget then;
1836 // if (cond)
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001837 LoadCondition(node->condition(), &then, &exit, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00001838 if (frame_ != NULL) {
1839 Branch(false, &exit);
1840 }
1841 // then
1842 if (frame_ != NULL || then.is_linked()) {
1843 then.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001844 Visit(node->then_statement());
Steve Blocka7e24c12009-10-30 11:49:00 +00001845 }
1846
1847 } else if (has_else_stm) {
1848 Comment cmnt(masm_, "[ IfElse");
1849 ASSERT(!has_then_stm);
1850 JumpTarget else_;
1851 // if (!cond)
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001852 LoadCondition(node->condition(), &exit, &else_, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00001853 if (frame_ != NULL) {
1854 Branch(true, &exit);
1855 }
1856 // else
1857 if (frame_ != NULL || else_.is_linked()) {
1858 else_.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001859 Visit(node->else_statement());
Steve Blocka7e24c12009-10-30 11:49:00 +00001860 }
1861
1862 } else {
1863 Comment cmnt(masm_, "[ If");
1864 ASSERT(!has_then_stm && !has_else_stm);
1865 // if (cond)
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001866 LoadCondition(node->condition(), &exit, &exit, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00001867 if (frame_ != NULL) {
1868 if (has_cc()) {
1869 cc_reg_ = al;
1870 } else {
1871 frame_->Drop();
1872 }
1873 }
1874 }
1875
1876 // end
1877 if (exit.is_linked()) {
1878 exit.Bind();
1879 }
1880 ASSERT(!has_valid_frame() || frame_->height() == original_height);
1881}
1882
1883
1884void CodeGenerator::VisitContinueStatement(ContinueStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001885 Comment cmnt(masm_, "[ ContinueStatement");
1886 CodeForStatementPosition(node);
1887 node->target()->continue_target()->Jump();
1888}
1889
1890
1891void CodeGenerator::VisitBreakStatement(BreakStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001892 Comment cmnt(masm_, "[ BreakStatement");
1893 CodeForStatementPosition(node);
1894 node->target()->break_target()->Jump();
1895}
1896
1897
1898void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
Steve Block8defd9f2010-07-08 12:39:36 +01001899 frame_->SpillAll();
Steve Blocka7e24c12009-10-30 11:49:00 +00001900 Comment cmnt(masm_, "[ ReturnStatement");
1901
1902 CodeForStatementPosition(node);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001903 Load(node->expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00001904 if (function_return_is_shadowed_) {
1905 frame_->EmitPop(r0);
1906 function_return_.Jump();
1907 } else {
1908 // Pop the result from the frame and prepare the frame for
1909 // returning thus making it easier to merge.
Steve Block8defd9f2010-07-08 12:39:36 +01001910 frame_->PopToR0();
Steve Blocka7e24c12009-10-30 11:49:00 +00001911 frame_->PrepareForReturn();
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001912 if (function_return_.is_bound()) {
1913 // If the function return label is already bound we reuse the
1914 // code by jumping to the return site.
1915 function_return_.Jump();
1916 } else {
1917 function_return_.Bind();
1918 GenerateReturnSequence();
1919 }
1920 }
1921}
Steve Blocka7e24c12009-10-30 11:49:00 +00001922
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001923
1924void CodeGenerator::GenerateReturnSequence() {
1925 if (FLAG_trace) {
1926 // Push the return value on the stack as the parameter.
1927 // Runtime::TraceExit returns the parameter as it is.
1928 frame_->EmitPush(r0);
1929 frame_->CallRuntime(Runtime::kTraceExit, 1);
1930 }
1931
1932#ifdef DEBUG
1933 // Add a label for checking the size of the code used for returning.
1934 Label check_exit_codesize;
1935 masm_->bind(&check_exit_codesize);
1936#endif
1937 // Make sure that the constant pool is not emitted inside of the return
1938 // sequence.
1939 { Assembler::BlockConstPoolScope block_const_pool(masm_);
1940 // Tear down the frame which will restore the caller's frame pointer and
1941 // the link register.
1942 frame_->Exit();
1943
1944 // Here we use masm_-> instead of the __ macro to prevent the code
1945 // coverage tool from instrumenting this code, as we rely on its size here.
1946 int32_t sp_delta = (scope()->num_parameters() + 1) * kPointerSize;
1947 masm_->add(sp, sp, Operand(sp_delta));
1948 masm_->Jump(lr);
1949 DeleteFrame();
1950
1951#ifdef DEBUG
1952 // Check that the size of the code used for returning matches what is
1953 // expected by the debugger. If the sp_delta above cannot be encoded in
1954 // the add instruction, the add will generate two instructions.
1955 int return_sequence_length =
1956 masm_->InstructionsGeneratedSince(&check_exit_codesize);
1957 CHECK(return_sequence_length ==
1958 Assembler::kJSReturnSequenceInstructions ||
1959 return_sequence_length ==
1960 Assembler::kJSReturnSequenceInstructions + 1);
1961#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00001962 }
1963}
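// A note on the two accepted lengths above (a sketch of the encoding
// constraint): sp_delta == (num_parameters + 1) * kPointerSize must fit
// in an ARM data-processing immediate, an 8-bit value rotated right by an
// even amount. For example, 4 parameters give sp_delta == 20, which
// encodes directly and costs a single add; a delta that does not encode
// is synthesized in two instructions, making the sequence one longer.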
1964
1965
1966void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) {
1967#ifdef DEBUG
1968 int original_height = frame_->height();
1969#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00001970 Comment cmnt(masm_, "[ WithEnterStatement");
1971 CodeForStatementPosition(node);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001972 Load(node->expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00001973 if (node->is_catch_block()) {
1974 frame_->CallRuntime(Runtime::kPushCatchContext, 1);
1975 } else {
1976 frame_->CallRuntime(Runtime::kPushContext, 1);
1977 }
1978#ifdef DEBUG
1979 JumpTarget verified_true;
Steve Block6ded16b2010-05-10 14:33:55 +01001980 __ cmp(r0, cp);
Steve Blocka7e24c12009-10-30 11:49:00 +00001981 verified_true.Branch(eq);
1982 __ stop("PushContext: r0 is expected to be the same as cp");
1983 verified_true.Bind();
1984#endif
1985 // Update context local.
1986 __ str(cp, frame_->Context());
1987 ASSERT(frame_->height() == original_height);
1988}
1989
1990
1991void CodeGenerator::VisitWithExitStatement(WithExitStatement* node) {
1992#ifdef DEBUG
1993 int original_height = frame_->height();
1994#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00001995 Comment cmnt(masm_, "[ WithExitStatement");
1996 CodeForStatementPosition(node);
1997 // Pop context.
1998 __ ldr(cp, ContextOperand(cp, Context::PREVIOUS_INDEX));
1999 // Update context local.
2000 __ str(cp, frame_->Context());
2001 ASSERT(frame_->height() == original_height);
2002}
2003
2004
2005void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
2006#ifdef DEBUG
2007 int original_height = frame_->height();
2008#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00002009 Comment cmnt(masm_, "[ SwitchStatement");
2010 CodeForStatementPosition(node);
Kristian Monsen25f61362010-05-21 11:50:48 +01002011 node->break_target()->SetExpectedHeight();
Steve Blocka7e24c12009-10-30 11:49:00 +00002012
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002013 Load(node->tag());
Steve Blocka7e24c12009-10-30 11:49:00 +00002014
2015 JumpTarget next_test;
2016 JumpTarget fall_through;
2017 JumpTarget default_entry;
2018 JumpTarget default_exit(JumpTarget::BIDIRECTIONAL);
2019 ZoneList<CaseClause*>* cases = node->cases();
2020 int length = cases->length();
2021 CaseClause* default_clause = NULL;
2022
2023 for (int i = 0; i < length; i++) {
2024 CaseClause* clause = cases->at(i);
2025 if (clause->is_default()) {
2026 // Remember the default clause and compile it at the end.
2027 default_clause = clause;
2028 continue;
2029 }
2030
2031 Comment cmnt(masm_, "[ Case clause");
2032 // Compile the test.
2033 next_test.Bind();
2034 next_test.Unuse();
2035 // Duplicate TOS.
Steve Block8defd9f2010-07-08 12:39:36 +01002036 frame_->Dup();
Steve Blocka7e24c12009-10-30 11:49:00 +00002037 Comparison(eq, NULL, clause->label(), true);
2038 Branch(false, &next_test);
2039
2040 // Before entering the body from the test, remove the switch value from
2041 // the stack.
2042 frame_->Drop();
2043
2044 // Label the body so that fall through is enabled.
2045 if (i > 0 && cases->at(i - 1)->is_default()) {
2046 default_exit.Bind();
2047 } else {
2048 fall_through.Bind();
2049 fall_through.Unuse();
2050 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002051 VisitStatements(clause->statements());
Steve Blocka7e24c12009-10-30 11:49:00 +00002052
2053 // If control flow can fall through from the body, jump to the next body
2054 // or the end of the statement.
2055 if (frame_ != NULL) {
2056 if (i < length - 1 && cases->at(i + 1)->is_default()) {
2057 default_entry.Jump();
2058 } else {
2059 fall_through.Jump();
2060 }
2061 }
2062 }
2063
2064 // The final "test" removes the switch value.
2065 next_test.Bind();
2066 frame_->Drop();
2067
2068 // If there is a default clause, compile it.
2069 if (default_clause != NULL) {
2070 Comment cmnt(masm_, "[ Default clause");
2071 default_entry.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002072 VisitStatements(default_clause->statements());
Steve Blocka7e24c12009-10-30 11:49:00 +00002073 // If control flow can fall out of the default and there is a case after
Steve Block8defd9f2010-07-08 12:39:36 +01002074 // it, jump to that case's body.
Steve Blocka7e24c12009-10-30 11:49:00 +00002075 if (frame_ != NULL && default_exit.is_bound()) {
2076 default_exit.Jump();
2077 }
2078 }
2079
2080 if (fall_through.is_linked()) {
2081 fall_through.Bind();
2082 }
2083
2084 if (node->break_target()->is_linked()) {
2085 node->break_target()->Bind();
2086 }
2087 node->break_target()->Unuse();
2088 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2089}
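// Roughly, the code above compiles
//   switch (v) { case a: S_a; case b: S_b; default: S_d; }
// into a chain of duplicate/compare/drop tests whose bodies keep the
// source-order fall-through (a sketch of the control flow, not the exact
// instruction layout):
//   dup v; if (v != a) goto t1; drop; goto B_a;
//   t1: dup v; if (v != b) goto t2; drop; goto B_b;
//   t2: drop; goto B_d;        // No case matched: enter the default.
//   B_a: S_a;                  // Falls through into B_b.
//   B_b: S_b;                  // Falls through into B_d via default_entry.
//   B_d: S_d;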
2090
2091
Steve Block3ce2e202009-11-05 08:53:23 +00002092void CodeGenerator::VisitDoWhileStatement(DoWhileStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002093#ifdef DEBUG
2094 int original_height = frame_->height();
2095#endif
Steve Block3ce2e202009-11-05 08:53:23 +00002096 Comment cmnt(masm_, "[ DoWhileStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00002097 CodeForStatementPosition(node);
Kristian Monsen25f61362010-05-21 11:50:48 +01002098 node->break_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002099 JumpTarget body(JumpTarget::BIDIRECTIONAL);
Steve Block6ded16b2010-05-10 14:33:55 +01002100 IncrementLoopNesting();
Steve Blocka7e24c12009-10-30 11:49:00 +00002101
Steve Block3ce2e202009-11-05 08:53:23 +00002102 // Label the top of the loop for the backward CFG edge. If the test
2103 // is always true we can use the continue target, and if the test is
2104 // always false there is no need.
2105 ConditionAnalysis info = AnalyzeCondition(node->cond());
2106 switch (info) {
2107 case ALWAYS_TRUE:
Kristian Monsen25f61362010-05-21 11:50:48 +01002108 node->continue_target()->SetExpectedHeight();
Steve Blocka7e24c12009-10-30 11:49:00 +00002109 node->continue_target()->Bind();
Steve Block3ce2e202009-11-05 08:53:23 +00002110 break;
2111 case ALWAYS_FALSE:
Kristian Monsen25f61362010-05-21 11:50:48 +01002112 node->continue_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002113 break;
2114 case DONT_KNOW:
Kristian Monsen25f61362010-05-21 11:50:48 +01002115 node->continue_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002116 body.Bind();
2117 break;
2118 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002119
Steve Block3ce2e202009-11-05 08:53:23 +00002120 CheckStack(); // TODO(1222600): ignore if body contains calls.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002121 Visit(node->body());
Steve Blocka7e24c12009-10-30 11:49:00 +00002122
Steve Blockd0582a62009-12-15 09:54:21 +00002123 // Compile the test.
Steve Block3ce2e202009-11-05 08:53:23 +00002124 switch (info) {
2125 case ALWAYS_TRUE:
2126 // If control can fall off the end of the body, jump back to the
2127 // top.
Steve Blocka7e24c12009-10-30 11:49:00 +00002128 if (has_valid_frame()) {
Steve Block3ce2e202009-11-05 08:53:23 +00002129 node->continue_target()->Jump();
Steve Blocka7e24c12009-10-30 11:49:00 +00002130 }
2131 break;
Steve Block3ce2e202009-11-05 08:53:23 +00002132 case ALWAYS_FALSE:
2133 // If we have a continue in the body, we only have to bind its
2134 // jump target.
2135 if (node->continue_target()->is_linked()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002136 node->continue_target()->Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00002137 }
Steve Block3ce2e202009-11-05 08:53:23 +00002138 break;
2139 case DONT_KNOW:
2140 // We have to compile the test expression if it can be reached by
2141 // control flow falling out of the body or via continue.
2142 if (node->continue_target()->is_linked()) {
2143 node->continue_target()->Bind();
2144 }
2145 if (has_valid_frame()) {
Steve Blockd0582a62009-12-15 09:54:21 +00002146 Comment cmnt(masm_, "[ DoWhileCondition");
2147 CodeForDoWhileConditionPosition(node);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002148 LoadCondition(node->cond(), &body, node->break_target(), true);
Steve Blocka7e24c12009-10-30 11:49:00 +00002149 if (has_valid_frame()) {
Steve Block3ce2e202009-11-05 08:53:23 +00002150 // An invalid frame here indicates that control did not
2151 // fall out of the test expression.
2152 Branch(true, &body);
Steve Blocka7e24c12009-10-30 11:49:00 +00002153 }
2154 }
2155 break;
Steve Blocka7e24c12009-10-30 11:49:00 +00002156 }
2157
2158 if (node->break_target()->is_linked()) {
2159 node->break_target()->Bind();
2160 }
Steve Block6ded16b2010-05-10 14:33:55 +01002161 DecrementLoopNesting();
Steve Block3ce2e202009-11-05 08:53:23 +00002162 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2163}
2164
2165
2166void CodeGenerator::VisitWhileStatement(WhileStatement* node) {
2167#ifdef DEBUG
2168 int original_height = frame_->height();
2169#endif
Steve Block3ce2e202009-11-05 08:53:23 +00002170 Comment cmnt(masm_, "[ WhileStatement");
2171 CodeForStatementPosition(node);
2172
2173 // If the test is never true and has no side effects there is no need
2174 // to compile the test or body.
2175 ConditionAnalysis info = AnalyzeCondition(node->cond());
2176 if (info == ALWAYS_FALSE) return;
2177
Kristian Monsen25f61362010-05-21 11:50:48 +01002178 node->break_target()->SetExpectedHeight();
Steve Block6ded16b2010-05-10 14:33:55 +01002179 IncrementLoopNesting();
Steve Block3ce2e202009-11-05 08:53:23 +00002180
2181 // Label the top of the loop with the continue target for the backward
2182 // CFG edge.
Kristian Monsen25f61362010-05-21 11:50:48 +01002183 node->continue_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002184 node->continue_target()->Bind();
2185
2186 if (info == DONT_KNOW) {
Steve Block8defd9f2010-07-08 12:39:36 +01002187 JumpTarget body(JumpTarget::BIDIRECTIONAL);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002188 LoadCondition(node->cond(), &body, node->break_target(), true);
Steve Block3ce2e202009-11-05 08:53:23 +00002189 if (has_valid_frame()) {
2190 // A NULL frame indicates that control did not fall out of the
2191 // test expression.
2192 Branch(false, node->break_target());
2193 }
2194 if (has_valid_frame() || body.is_linked()) {
2195 body.Bind();
2196 }
2197 }
2198
2199 if (has_valid_frame()) {
2200 CheckStack(); // TODO(1222600): ignore if body contains calls.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002201 Visit(node->body());
Steve Block3ce2e202009-11-05 08:53:23 +00002202
2203 // If control flow can fall out of the body, jump back to the top.
2204 if (has_valid_frame()) {
2205 node->continue_target()->Jump();
2206 }
2207 }
2208 if (node->break_target()->is_linked()) {
2209 node->break_target()->Bind();
2210 }
Steve Block6ded16b2010-05-10 14:33:55 +01002211 DecrementLoopNesting();
Steve Block3ce2e202009-11-05 08:53:23 +00002212 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2213}
2214
2215
2216void CodeGenerator::VisitForStatement(ForStatement* node) {
2217#ifdef DEBUG
2218 int original_height = frame_->height();
2219#endif
Steve Block3ce2e202009-11-05 08:53:23 +00002220 Comment cmnt(masm_, "[ ForStatement");
2221 CodeForStatementPosition(node);
2222 if (node->init() != NULL) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002223 Visit(node->init());
Steve Block3ce2e202009-11-05 08:53:23 +00002224 }
2225
2226 // If the test is never true there is no need to compile the test or
2227 // body.
2228 ConditionAnalysis info = AnalyzeCondition(node->cond());
2229 if (info == ALWAYS_FALSE) return;
2230
Kristian Monsen25f61362010-05-21 11:50:48 +01002231 node->break_target()->SetExpectedHeight();
Steve Block6ded16b2010-05-10 14:33:55 +01002232 IncrementLoopNesting();
Steve Block3ce2e202009-11-05 08:53:23 +00002233
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002234 // We know that the loop index is a smi if it is not modified in the
2235 // loop body and it is checked against a constant limit in the loop
2236 // condition. In this case, we reset the static type information of the
2237 // loop index to smi before compiling the body, the update expression, and
2238 // the bottom check of the loop condition.
2239 TypeInfoCodeGenState type_info_scope(this,
2240 node->is_fast_smi_loop() ?
2241 node->loop_variable()->slot() :
2242 NULL,
2243 TypeInfo::Smi());
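  // An illustrative example (assuming is_fast_smi_loop() recognizes this
  // shape): in
  //   for (var i = 0; i < 100; i++) { a[i] = 0; }
  // the index is only written by the update expression and is compared
  // against a constant limit, so its slot can safely be typed as Smi for
  // the body, the update, and the bottom condition check.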
2244
Steve Block3ce2e202009-11-05 08:53:23 +00002245 // If there is no update statement, label the top of the loop with the
2246 // continue target, otherwise with the loop target.
2247 JumpTarget loop(JumpTarget::BIDIRECTIONAL);
2248 if (node->next() == NULL) {
Kristian Monsen25f61362010-05-21 11:50:48 +01002249 node->continue_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002250 node->continue_target()->Bind();
2251 } else {
Kristian Monsen25f61362010-05-21 11:50:48 +01002252 node->continue_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002253 loop.Bind();
2254 }
2255
2256 // If the test is always true, there is no need to compile it.
2257 if (info == DONT_KNOW) {
2258 JumpTarget body;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002259 LoadCondition(node->cond(), &body, node->break_target(), true);
Steve Block3ce2e202009-11-05 08:53:23 +00002260 if (has_valid_frame()) {
2261 Branch(false, node->break_target());
2262 }
2263 if (has_valid_frame() || body.is_linked()) {
2264 body.Bind();
2265 }
2266 }
2267
2268 if (has_valid_frame()) {
2269 CheckStack(); // TODO(1222600): ignore if body contains calls.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002270 Visit(node->body());
Steve Block3ce2e202009-11-05 08:53:23 +00002271
2272 if (node->next() == NULL) {
2273 // If there is no update statement and control flow can fall out
2274 // of the loop, jump directly to the continue label.
2275 if (has_valid_frame()) {
2276 node->continue_target()->Jump();
2277 }
2278 } else {
2279 // If there is an update statement and control flow can reach it
2280 // via falling out of the body of the loop or continuing, we
2281 // compile the update statement.
2282 if (node->continue_target()->is_linked()) {
2283 node->continue_target()->Bind();
2284 }
2285 if (has_valid_frame()) {
2286 // Record source position of the statement as this code which is
2287 // after the code for the body actually belongs to the loop
2288 // statement and not the body.
2289 CodeForStatementPosition(node);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002290 Visit(node->next());
Steve Block3ce2e202009-11-05 08:53:23 +00002291 loop.Jump();
2292 }
2293 }
2294 }
2295 if (node->break_target()->is_linked()) {
2296 node->break_target()->Bind();
2297 }
Steve Block6ded16b2010-05-10 14:33:55 +01002298 DecrementLoopNesting();
Steve Blocka7e24c12009-10-30 11:49:00 +00002299 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2300}
2301
2302
2303void CodeGenerator::VisitForInStatement(ForInStatement* node) {
2304#ifdef DEBUG
2305 int original_height = frame_->height();
2306#endif
Steve Block6ded16b2010-05-10 14:33:55 +01002307 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00002308 Comment cmnt(masm_, "[ ForInStatement");
2309 CodeForStatementPosition(node);
2310
2311 JumpTarget primitive;
2312 JumpTarget jsobject;
2313 JumpTarget fixed_array;
2314 JumpTarget entry(JumpTarget::BIDIRECTIONAL);
2315 JumpTarget end_del_check;
2316 JumpTarget exit;
2317
2318 // Get the object to enumerate over (converted to JSObject).
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002319 Load(node->enumerable());
Steve Blocka7e24c12009-10-30 11:49:00 +00002320
2321 // Both SpiderMonkey and kjs ignore null and undefined in contrast
2322 // to the specification. 12.6.4 mandates a call to ToObject.
2323 frame_->EmitPop(r0);
2324 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
2325 __ cmp(r0, ip);
2326 exit.Branch(eq);
2327 __ LoadRoot(ip, Heap::kNullValueRootIndex);
2328 __ cmp(r0, ip);
2329 exit.Branch(eq);
2330
2331 // Stack layout in body:
2332 // [iteration counter (Smi)]
2333 // [length of array]
2334 // [FixedArray]
2335 // [Map or 0]
2336 // [Object]
2337
2338 // Check if enumerable is already a JSObject
2339 __ tst(r0, Operand(kSmiTagMask));
2340 primitive.Branch(eq);
2341 __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
2342 jsobject.Branch(hs);
2343
2344 primitive.Bind();
2345 frame_->EmitPush(r0);
Steve Blockd0582a62009-12-15 09:54:21 +00002346 frame_->InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS, 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00002347
2348 jsobject.Bind();
2349 // Get the set of properties (as a FixedArray or Map).
Steve Blockd0582a62009-12-15 09:54:21 +00002350 // r0: value to be iterated over
2351 frame_->EmitPush(r0); // Push the object being iterated over.
2352
2353 // Check cache validity in generated code. This is a fast case for
2354 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
2355 // guarantee cache validity, call the runtime system to check cache
2356 // validity or get the property names in a fixed array.
2357 JumpTarget call_runtime;
2358 JumpTarget loop(JumpTarget::BIDIRECTIONAL);
2359 JumpTarget check_prototype;
2360 JumpTarget use_cache;
2361 __ mov(r1, Operand(r0));
2362 loop.Bind();
2363 // Check that there are no elements.
2364 __ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
2365 __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex);
2366 __ cmp(r2, r4);
2367 call_runtime.Branch(ne);
2368 // Check that instance descriptors are not empty so that we can
2369 // check for an enum cache. Leave the map in r3 for the subsequent
2370 // prototype load.
2371 __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
2372 __ ldr(r2, FieldMemOperand(r3, Map::kInstanceDescriptorsOffset));
2373 __ LoadRoot(ip, Heap::kEmptyDescriptorArrayRootIndex);
2374 __ cmp(r2, ip);
2375 call_runtime.Branch(eq);
2376 // Check that there is an enum cache in the non-empty instance
2377 // descriptors. This is the case if the next enumeration index
2378 // field does not contain a smi.
2379 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumerationIndexOffset));
2380 __ tst(r2, Operand(kSmiTagMask));
2381 call_runtime.Branch(eq);
2382 // For all objects but the receiver, check that the cache is empty.
2383 // r4: empty fixed array root.
2384 __ cmp(r1, r0);
2385 check_prototype.Branch(eq);
2386 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));
2387 __ cmp(r2, r4);
2388 call_runtime.Branch(ne);
2389 check_prototype.Bind();
2390 // Load the prototype from the map and loop if non-null.
2391 __ ldr(r1, FieldMemOperand(r3, Map::kPrototypeOffset));
2392 __ LoadRoot(ip, Heap::kNullValueRootIndex);
2393 __ cmp(r1, ip);
2394 loop.Branch(ne);
2395 // The enum cache is valid. Load the map of the object being
2396 // iterated over and use the cache for the iteration.
2397 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
2398 use_cache.Jump();
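  // A pseudocode sketch of the validity walk above:
  //   for (o = receiver; ; o = o->map->prototype) {
  //     if (o->elements != empty_fixed_array) goto call_runtime;
  //     if (o->map->instance_descriptors == empty) goto call_runtime;
  //     if (the enumeration index field is a Smi) goto call_runtime;
  //     if (o != receiver && its enum cache != empty) goto call_runtime;
  //     if (o->map->prototype == null) break;  // The cache is usable.
  //   }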
2399
2400 call_runtime.Bind();
2401 // Call the runtime to get the property names for the object.
2402 frame_->EmitPush(r0); // push the object (slot 4) for the runtime call
Steve Blocka7e24c12009-10-30 11:49:00 +00002403 frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1);
2404
Steve Blockd0582a62009-12-15 09:54:21 +00002405 // If we got a map from the runtime call, we can do a fast
2406 // modification check. Otherwise, we got a fixed array, and we have
2407 // to do a slow check.
2408 // r0: map or fixed array (result from call to
2409 // Runtime::kGetPropertyNamesFast)
Steve Blocka7e24c12009-10-30 11:49:00 +00002410 __ mov(r2, Operand(r0));
2411 __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
2412 __ LoadRoot(ip, Heap::kMetaMapRootIndex);
2413 __ cmp(r1, ip);
2414 fixed_array.Branch(ne);
2415
Steve Blockd0582a62009-12-15 09:54:21 +00002416 use_cache.Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00002417 // Get enum cache
Steve Blockd0582a62009-12-15 09:54:21 +00002418 // r0: map (either the result from a call to
2419 // Runtime::kGetPropertyNamesFast or has been fetched directly from
2420 // the object)
Steve Blocka7e24c12009-10-30 11:49:00 +00002421 __ mov(r1, Operand(r0));
2422 __ ldr(r1, FieldMemOperand(r1, Map::kInstanceDescriptorsOffset));
2423 __ ldr(r1, FieldMemOperand(r1, DescriptorArray::kEnumerationIndexOffset));
2424 __ ldr(r2,
2425 FieldMemOperand(r1, DescriptorArray::kEnumCacheBridgeCacheOffset));
2426
2427 frame_->EmitPush(r0); // map
2428 frame_->EmitPush(r2); // enum cache bridge cache
2429 __ ldr(r0, FieldMemOperand(r2, FixedArray::kLengthOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00002430 frame_->EmitPush(r0);
2431 __ mov(r0, Operand(Smi::FromInt(0)));
2432 frame_->EmitPush(r0);
2433 entry.Jump();
2434
2435 fixed_array.Bind();
2436 __ mov(r1, Operand(Smi::FromInt(0)));
2437 frame_->EmitPush(r1); // insert 0 in place of Map
2438 frame_->EmitPush(r0);
2439
2440 // Push the length of the array and the initial index onto the stack.
2441 __ ldr(r0, FieldMemOperand(r0, FixedArray::kLengthOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00002442 frame_->EmitPush(r0);
2443 __ mov(r0, Operand(Smi::FromInt(0))); // init index
2444 frame_->EmitPush(r0);
2445
2446 // Condition.
2447 entry.Bind();
2448 // sp[0] : index
2449 // sp[1] : array/enum cache length
2450 // sp[2] : array or enum cache
2451 // sp[3] : 0 or map
2452 // sp[4] : enumerable
2453 // Grab the current frame's height for the break and continue
2454 // targets only after all the state is pushed on the frame.
Kristian Monsen25f61362010-05-21 11:50:48 +01002455 node->break_target()->SetExpectedHeight();
2456 node->continue_target()->SetExpectedHeight();
Steve Blocka7e24c12009-10-30 11:49:00 +00002457
Kristian Monsen25f61362010-05-21 11:50:48 +01002458 // Load the current count to r0, load the length to r1.
Leon Clarkef7060e22010-06-03 12:02:55 +01002459 __ Ldrd(r0, r1, frame_->ElementAt(0));
Steve Block6ded16b2010-05-10 14:33:55 +01002460 __ cmp(r0, r1); // compare to the array length
Steve Blocka7e24c12009-10-30 11:49:00 +00002461 node->break_target()->Branch(hs);
2462
Steve Blocka7e24c12009-10-30 11:49:00 +00002463 // Get the i'th entry of the array.
2464 __ ldr(r2, frame_->ElementAt(2));
2465 __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
2466 __ ldr(r3, MemOperand(r2, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
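  // A worked example of the scaled index above (assuming the 1-bit Smi
  // tag): for i == 3 the counter register holds Smi(3), i.e. the word 6,
  // and LSL #(kPointerSizeLog2 - kSmiTagSize) == LSL #1 turns it into the
  // byte offset 12 == 3 * kPointerSize. The Smi tag is absorbed into the
  // address scaling instead of being stripped first.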
2467
2468 // Get Map or 0.
2469 __ ldr(r2, frame_->ElementAt(3));
2470 // Check if this (still) matches the map of the enumerable.
2471 // If not, we have to filter the key.
2472 __ ldr(r1, frame_->ElementAt(4));
2473 __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset));
2474 __ cmp(r1, Operand(r2));
2475 end_del_check.Branch(eq);
2476
2477 // Convert the entry to a string (or null if it isn't a property anymore).
2478 __ ldr(r0, frame_->ElementAt(4)); // push enumerable
2479 frame_->EmitPush(r0);
2480 frame_->EmitPush(r3); // push entry
Steve Blockd0582a62009-12-15 09:54:21 +00002481 frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_JS, 2);
Steve Blocka7e24c12009-10-30 11:49:00 +00002482 __ mov(r3, Operand(r0));
2483
2484 // If the property has been removed while iterating, we just skip it.
2485 __ LoadRoot(ip, Heap::kNullValueRootIndex);
2486 __ cmp(r3, ip);
2487 node->continue_target()->Branch(eq);
2488
2489 end_del_check.Bind();
2490 // Store the entry in the 'each' expression and take another spin in the
2491 // loop. r3: i'th entry of the enum cache (or string thereof)
2492 frame_->EmitPush(r3); // push entry
2493 { Reference each(this, node->each());
2494 if (!each.is_illegal()) {
2495 if (each.size() > 0) {
2496 __ ldr(r0, frame_->ElementAt(each.size()));
2497 frame_->EmitPush(r0);
Steve Block8defd9f2010-07-08 12:39:36 +01002498 each.SetValue(NOT_CONST_INIT, UNLIKELY_SMI);
Leon Clarked91b9f72010-01-27 17:25:45 +00002499 frame_->Drop(2);
2500 } else {
2501 // If the reference was to a slot we rely on the convenient property
2502 // that it doesn't matter whether a value (e.g., r3 pushed above) is
2503 // right on top of or right underneath a zero-sized reference.
Steve Block8defd9f2010-07-08 12:39:36 +01002504 each.SetValue(NOT_CONST_INIT, UNLIKELY_SMI);
Leon Clarked91b9f72010-01-27 17:25:45 +00002505 frame_->Drop();
Steve Blocka7e24c12009-10-30 11:49:00 +00002506 }
2507 }
2508 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002509 // Body.
2510 CheckStack(); // TODO(1222600): ignore if body contains calls.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002511 Visit(node->body());
Steve Blocka7e24c12009-10-30 11:49:00 +00002512
2513 // Next. Reestablish a spilled frame in case we are coming here via
2514 // a continue in the body.
2515 node->continue_target()->Bind();
2516 frame_->SpillAll();
2517 frame_->EmitPop(r0);
2518 __ add(r0, r0, Operand(Smi::FromInt(1)));
2519 frame_->EmitPush(r0);
2520 entry.Jump();
2521
2522 // Cleanup. No need to spill because VirtualFrame::Drop is safe for
2523 // any frame.
2524 node->break_target()->Bind();
2525 frame_->Drop(5);
2526
2527 // Exit.
2528 exit.Bind();
2529 node->continue_target()->Unuse();
2530 node->break_target()->Unuse();
2531 ASSERT(frame_->height() == original_height);
2532}
2533
2534
Steve Block3ce2e202009-11-05 08:53:23 +00002535void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002536#ifdef DEBUG
2537 int original_height = frame_->height();
2538#endif
Steve Block6ded16b2010-05-10 14:33:55 +01002539 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Block3ce2e202009-11-05 08:53:23 +00002540 Comment cmnt(masm_, "[ TryCatchStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00002541 CodeForStatementPosition(node);
2542
2543 JumpTarget try_block;
2544 JumpTarget exit;
2545
2546 try_block.Call();
2547 // --- Catch block ---
2548 frame_->EmitPush(r0);
2549
2550 // Store the caught exception in the catch variable.
Leon Clarkee46be812010-01-19 14:06:41 +00002551 Variable* catch_var = node->catch_var()->var();
2552 ASSERT(catch_var != NULL && catch_var->slot() != NULL);
2553 StoreToSlot(catch_var->slot(), NOT_CONST_INIT);
Steve Blocka7e24c12009-10-30 11:49:00 +00002554
2555 // Remove the exception from the stack.
2556 frame_->Drop();
2557
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002558 VisitStatements(node->catch_block()->statements());
Steve Blocka7e24c12009-10-30 11:49:00 +00002559 if (frame_ != NULL) {
2560 exit.Jump();
2561 }
2562
2563
2564 // --- Try block ---
2565 try_block.Bind();
2566
2567 frame_->PushTryHandler(TRY_CATCH_HANDLER);
2568 int handler_height = frame_->height();
2569
2570 // Shadow the labels for all escapes from the try block, including
2571 // returns. During shadowing, the original label is hidden as the
2572 // LabelShadow and operations on the original actually affect the
2573 // shadowing label.
2574 //
2575 // We should probably try to unify the escaping labels and the return
2576 // label.
2577 int nof_escapes = node->escaping_targets()->length();
2578 List<ShadowTarget*> shadows(1 + nof_escapes);
2579
2580 // Add the shadow target for the function return.
2581 static const int kReturnShadowIndex = 0;
2582 shadows.Add(new ShadowTarget(&function_return_));
2583 bool function_return_was_shadowed = function_return_is_shadowed_;
2584 function_return_is_shadowed_ = true;
2585 ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);
2586
2587 // Add the remaining shadow targets.
2588 for (int i = 0; i < nof_escapes; i++) {
2589 shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
2590 }
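  // An example of an escaping target (an illustrative sketch): in
  //   outer: while (c) { try { if (p) break outer; } catch (e) { } }
  // the break jumps from inside the try block to code outside it, so the
  // loop's break target appears in node->escaping_targets() and must be
  // shadowed so the jump first unlinks the handler from the try chain.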
2591
2592 // Generate code for the statements in the try block.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002593 VisitStatements(node->try_block()->statements());
Steve Blocka7e24c12009-10-30 11:49:00 +00002594
2595 // Stop the introduced shadowing and count the number of required unlinks.
2596 // After shadowing stops, the original labels are unshadowed and the
2597 // LabelShadows represent the formerly shadowing labels.
2598 bool has_unlinks = false;
2599 for (int i = 0; i < shadows.length(); i++) {
2600 shadows[i]->StopShadowing();
2601 has_unlinks = has_unlinks || shadows[i]->is_linked();
2602 }
2603 function_return_is_shadowed_ = function_return_was_shadowed;
2604
2605 // Get an external reference to the handler address.
2606 ExternalReference handler_address(Top::k_handler_address);
2607
2608 // If we can fall off the end of the try block, unlink from try chain.
2609 if (has_valid_frame()) {
2610 // The next handler address is on top of the frame. Unlink from
2611 // the handler list and drop the rest of this handler from the
2612 // frame.
2613 ASSERT(StackHandlerConstants::kNextOffset == 0);
2614 frame_->EmitPop(r1);
2615 __ mov(r3, Operand(handler_address));
2616 __ str(r1, MemOperand(r3));
2617 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2618 if (has_unlinks) {
2619 exit.Jump();
2620 }
2621 }
2622
2623 // Generate unlink code for the (formerly) shadowing labels that have been
2624 // jumped to. Deallocate each shadow target.
2625 for (int i = 0; i < shadows.length(); i++) {
2626 if (shadows[i]->is_linked()) {
2627 // Unlink from try chain.
2628 shadows[i]->Bind();
2629 // Because we can be jumping here (to spilled code) from unspilled
2630 // code, we need to reestablish a spilled frame at this block.
2631 frame_->SpillAll();
2632
2633 // Reload sp from the top handler, because some statements that we
2634 // break from (e.g., for...in) may have left stuff on the stack.
2635 __ mov(r3, Operand(handler_address));
2636 __ ldr(sp, MemOperand(r3));
2637 frame_->Forget(frame_->height() - handler_height);
2638
2639 ASSERT(StackHandlerConstants::kNextOffset == 0);
2640 frame_->EmitPop(r1);
2641 __ str(r1, MemOperand(r3));
2642 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2643
2644 if (!function_return_is_shadowed_ && i == kReturnShadowIndex) {
2645 frame_->PrepareForReturn();
2646 }
2647 shadows[i]->other_target()->Jump();
2648 }
2649 }
2650
2651 exit.Bind();
2652 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2653}


void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  VirtualFrame::SpilledScope spilled_scope(frame_);
  Comment cmnt(masm_, "[ TryFinallyStatement");
  CodeForStatementPosition(node);

  // State: Used to keep track of reason for entering the finally
  // block.  Should probably be extended to hold information for
  // break/continue from within the try block.
  enum { FALLING, THROWING, JUMPING };
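
  // For instance, in
  //   try { if (c) return x; } finally { f(); }
  // the finally block is entered with r2 == Smi::FromInt(JUMPING + 0)
  // when the shadowed return is taken (the return target has shadow
  // index 0), THROWING when the try block throws, and FALLING when
  // execution falls off the end of the try block.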

  JumpTarget try_block;
  JumpTarget finally_block;

  try_block.Call();

  frame_->EmitPush(r0);  // save exception object on the stack
  // In case of thrown exceptions, this is where we continue.
  __ mov(r2, Operand(Smi::FromInt(THROWING)));
  finally_block.Jump();

  // --- Try block ---
  try_block.Bind();

  frame_->PushTryHandler(TRY_FINALLY_HANDLER);
  int handler_height = frame_->height();

  // Shadow the labels for all escapes from the try block, including
  // returns.  Shadowing hides the original label as the LabelShadow and
  // operations on the original actually affect the shadowing label.
  //
  // We should probably try to unify the escaping labels and the return
  // label.
  int nof_escapes = node->escaping_targets()->length();
  List<ShadowTarget*> shadows(1 + nof_escapes);

  // Add the shadow target for the function return.
  static const int kReturnShadowIndex = 0;
  shadows.Add(new ShadowTarget(&function_return_));
  bool function_return_was_shadowed = function_return_is_shadowed_;
  function_return_is_shadowed_ = true;
  ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);

  // Add the remaining shadow targets.
  for (int i = 0; i < nof_escapes; i++) {
    shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
  }

  // Generate code for the statements in the try block.
  VisitStatements(node->try_block()->statements());

  // Stop the introduced shadowing and count the number of required unlinks.
  // After shadowing stops, the original labels are unshadowed and the
  // LabelShadows represent the formerly shadowing labels.
  int nof_unlinks = 0;
  for (int i = 0; i < shadows.length(); i++) {
    shadows[i]->StopShadowing();
    if (shadows[i]->is_linked()) nof_unlinks++;
  }
  function_return_is_shadowed_ = function_return_was_shadowed;

  // Get an external reference to the handler address.
  ExternalReference handler_address(Top::k_handler_address);

  // If we can fall off the end of the try block, unlink from the try
  // chain and set the state on the frame to FALLING.
  if (has_valid_frame()) {
    // The next handler address is on top of the frame.
    ASSERT(StackHandlerConstants::kNextOffset == 0);
    frame_->EmitPop(r1);
    __ mov(r3, Operand(handler_address));
    __ str(r1, MemOperand(r3));
    frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

    // Fake a top of stack value (unneeded when FALLING) and set the
    // state in r2, then jump around the unlink blocks if any.
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    frame_->EmitPush(r0);
    __ mov(r2, Operand(Smi::FromInt(FALLING)));
    if (nof_unlinks > 0) {
      finally_block.Jump();
    }
  }

  // Generate code to unlink and set the state for the (formerly)
  // shadowing targets that have been jumped to.
  for (int i = 0; i < shadows.length(); i++) {
    if (shadows[i]->is_linked()) {
      // If we have come from the shadowed return, the return value is
      // in (a non-refcounted reference to) r0.  We must preserve it
      // until it is pushed.
      //
      // Because we can be jumping here (to spilled code) from
      // unspilled code, we need to reestablish a spilled frame at
      // this block.
      shadows[i]->Bind();
      frame_->SpillAll();

      // Reload sp from the top handler, because some statements that
      // we break from (e.g. for...in) may have left stuff on the
      // stack.
      __ mov(r3, Operand(handler_address));
      __ ldr(sp, MemOperand(r3));
      frame_->Forget(frame_->height() - handler_height);

      // Unlink this handler and drop it from the frame.  The next
      // handler address is currently on top of the frame.
      ASSERT(StackHandlerConstants::kNextOffset == 0);
      frame_->EmitPop(r1);
      __ str(r1, MemOperand(r3));
      frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

      if (i == kReturnShadowIndex) {
        // If this label shadowed the function return, materialize the
        // return value on the stack.
        frame_->EmitPush(r0);
      } else {
        // Fake TOS for targets that shadowed breaks and continues.
        __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
        frame_->EmitPush(r0);
      }
      __ mov(r2, Operand(Smi::FromInt(JUMPING + i)));
      if (--nof_unlinks > 0) {
        // If this is not the last unlink block, jump around the next.
        finally_block.Jump();
      }
    }
  }

  // --- Finally block ---
  finally_block.Bind();

  // Push the state on the stack.
  frame_->EmitPush(r2);

  // We keep two elements on the stack - the (possibly faked) result
  // and the state - while evaluating the finally block.
  //
  // Generate code for the statements in the finally block.
  VisitStatements(node->finally_block()->statements());

  if (has_valid_frame()) {
    // Restore state and return value or faked TOS.
    frame_->EmitPop(r2);
    frame_->EmitPop(r0);
  }

  // Generate code to jump to the right destination for all used
  // formerly shadowing targets.  Deallocate each shadow target.
  for (int i = 0; i < shadows.length(); i++) {
    if (has_valid_frame() && shadows[i]->is_bound()) {
      JumpTarget* original = shadows[i]->other_target();
      __ cmp(r2, Operand(Smi::FromInt(JUMPING + i)));
      if (!function_return_is_shadowed_ && i == kReturnShadowIndex) {
        JumpTarget skip;
        skip.Branch(ne);
        frame_->PrepareForReturn();
        original->Jump();
        skip.Bind();
      } else {
        original->Branch(eq);
      }
    }
  }

  if (has_valid_frame()) {
    // Check if we need to rethrow the exception.
    JumpTarget exit;
    __ cmp(r2, Operand(Smi::FromInt(THROWING)));
    exit.Branch(ne);

    // Rethrow exception.
    frame_->EmitPush(r0);
    frame_->CallRuntime(Runtime::kReThrow, 1);

    // Done.
    exit.Bind();
  }
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}


void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ DebuggerStatement");
  CodeForStatementPosition(node);
#ifdef ENABLE_DEBUGGER_SUPPORT
  frame_->DebugBreak();
#endif
  // Ignore the return value.
  ASSERT(frame_->height() == original_height);
}


void CodeGenerator::InstantiateFunction(
    Handle<SharedFunctionInfo> function_info) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
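  // For instance, an inner 'function (a, b) { return a + b; }' has no
  // literals to clone, so it can be allocated by the stub; a nested
  // function whose body contains object, array, or regexp literals takes
  // the Runtime::kNewClosure path below.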
  if (scope()->is_function_scope() && function_info->num_literals() == 0) {
    FastNewClosureStub stub;
    frame_->EmitPush(Operand(function_info));
    frame_->SpillAll();
    frame_->CallStub(&stub, 1);
    frame_->EmitPush(r0);
  } else {
    // Create a new closure.
    frame_->EmitPush(cp);
    frame_->EmitPush(Operand(function_info));
    frame_->CallRuntime(Runtime::kNewClosure, 2);
    frame_->EmitPush(r0);
  }
}


void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ FunctionLiteral");

  // Build the function info and instantiate it.
  Handle<SharedFunctionInfo> function_info =
      Compiler::BuildFunctionInfo(node, script(), this);
  // Check for stack-overflow exception.
  if (HasStackOverflow()) {
    ASSERT(frame_->height() == original_height);
    return;
  }
  InstantiateFunction(function_info);
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitSharedFunctionInfoLiteral(
    SharedFunctionInfoLiteral* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ SharedFunctionInfoLiteral");
  InstantiateFunction(node->shared_function_info());
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitConditional(Conditional* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Conditional");
  JumpTarget then;
  JumpTarget else_;
  LoadCondition(node->condition(), &then, &else_, true);
  if (has_valid_frame()) {
    Branch(false, &else_);
  }
  if (has_valid_frame() || then.is_linked()) {
    then.Bind();
    Load(node->then_expression());
  }
  if (else_.is_linked()) {
    JumpTarget exit;
    if (has_valid_frame()) exit.Jump();
    else_.Bind();
    Load(node->else_expression());
    if (exit.is_linked()) exit.Bind();
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
  if (slot->type() == Slot::LOOKUP) {
    ASSERT(slot->var()->is_dynamic());

    // JumpTargets do not yet support merging frames so the frame must be
    // spilled when jumping to these targets.
    JumpTarget slow;
    JumpTarget done;

    // Generate fast case for loading from slots that correspond to
    // local/global variables or arguments unless they are shadowed by
    // eval-introduced bindings.
    EmitDynamicLoadFromSlotFastCase(slot,
                                    typeof_state,
                                    &slow,
                                    &done);

    slow.Bind();
    frame_->EmitPush(cp);
    frame_->EmitPush(Operand(slot->var()->name()));

    if (typeof_state == INSIDE_TYPEOF) {
      frame_->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    } else {
      frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
    }

    done.Bind();
    frame_->EmitPush(r0);

  } else {
    Register scratch = VirtualFrame::scratch0();
    TypeInfo info = type_info(slot);
    frame_->EmitPush(SlotOperand(slot, scratch), info);

    if (slot->var()->mode() == Variable::CONST) {
      // Const slots may contain 'the hole' value (the constant hasn't been
      // initialized yet) which needs to be converted into the 'undefined'
      // value.
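      // For example, in eval'd code like 'const c; use(c); c = 1;' a read
      // of c before its initialization is reached sees the hole and must
      // produce 'undefined' instead.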
      Comment cmnt(masm_, "[ Unhole const");
      Register tos = frame_->PopToRegister();
      __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
      __ cmp(tos, ip);
      __ LoadRoot(tos, Heap::kUndefinedValueRootIndex, eq);
      frame_->EmitPush(tos);
    }
  }
}


void CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot,
                                                  TypeofState state) {
  VirtualFrame::RegisterAllocationScope scope(this);
  LoadFromSlot(slot, state);

  // Bail out quickly if we're not using lazy arguments allocation.
  if (ArgumentsMode() != LAZY_ARGUMENTS_ALLOCATION) return;

  // ... or if the slot isn't a non-parameter arguments slot.
  if (slot->type() == Slot::PARAMETER || !slot->is_arguments()) return;

  // Peek at the value just loaded onto the stack, leaving it on the
  // stack.
  Register tos = frame_->Peek();

  // If the loaded value is the sentinel that indicates that we
  // haven't loaded the arguments object yet, we need to do it now.
  JumpTarget exit;
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(tos, ip);
  exit.Branch(ne);
  frame_->Drop();
  StoreArgumentsObject(false);
  exit.Bind();
}


void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
  ASSERT(slot != NULL);
  VirtualFrame::RegisterAllocationScope scope(this);
  if (slot->type() == Slot::LOOKUP) {
    ASSERT(slot->var()->is_dynamic());

    // For now, just do a runtime call.
    frame_->EmitPush(cp);
    frame_->EmitPush(Operand(slot->var()->name()));

    if (init_state == CONST_INIT) {
      // Same as the case for a normal store, but ignores attribute
      // (e.g. READ_ONLY) of context slot so that we can initialize
      // const properties (introduced via eval("const foo = (some
      // expr);")).  Also, uses the current function context instead of
      // the top context.
      //
      // Note that we must declare the foo upon entry of eval(), via a
      // context slot declaration, but we cannot initialize it at the
      // same time, because the const declaration may be at the end of
      // the eval code (sigh...) and the const variable may have been
      // used before (where its value is 'undefined').  Thus, we can only
      // do the initialization when we actually encounter the expression
      // and when the expression operands are defined and valid, and
      // thus we need the split into 2 operations: declaration of the
      // context slot followed by initialization.
      frame_->CallRuntime(Runtime::kInitializeConstContextSlot, 3);
    } else {
      frame_->CallRuntime(Runtime::kStoreContextSlot, 3);
    }
    // Storing a variable must keep the (new) value on the expression
    // stack.  This is necessary for compiling assignment expressions.
    frame_->EmitPush(r0);

  } else {
    ASSERT(!slot->var()->is_dynamic());
    Register scratch = VirtualFrame::scratch0();
    Register scratch2 = VirtualFrame::scratch1();

    // The frame must be spilled when branching to this target.
    JumpTarget exit;

    if (init_state == CONST_INIT) {
      ASSERT(slot->var()->mode() == Variable::CONST);
      // Only the first const initialization must be executed (the slot
      // still contains 'the hole' value).  When the assignment is
      // executed, the code is identical to a normal store (see below).
      Comment cmnt(masm_, "[ Init const");
      __ ldr(scratch, SlotOperand(slot, scratch));
      __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
      __ cmp(scratch, ip);
      exit.Branch(ne);
    }

    // We must execute the store.  Storing a variable must keep the
    // (new) value on the stack.  This is necessary for compiling
    // assignment expressions.
    //
    // Note: We will reach here even with slot->var()->mode() ==
    // Variable::CONST because of const declarations which will
    // initialize consts to 'the hole' value and by doing so, end up
    // calling this code.  r2 may be loaded with context; used below in
    // RecordWrite.
    Register tos = frame_->Peek();
    __ str(tos, SlotOperand(slot, scratch));
    if (slot->type() == Slot::CONTEXT) {
      // Skip write barrier if the written value is a smi.
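      // (Smis are tagged immediates rather than heap pointers, so the
      // garbage collector never needs to be told about stores of them.)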
      __ tst(tos, Operand(kSmiTagMask));
      // We don't use tos any more after here.
      exit.Branch(eq);
      // scratch is loaded with context when calling SlotOperand above.
      int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
      // We need an extra register.  Until we have a way to do that in the
      // virtual frame we will cheat and ask for a free TOS register.
      Register scratch3 = frame_->GetTOSRegister();
      __ RecordWrite(scratch, Operand(offset), scratch2, scratch3);
    }
    // If we definitely did not jump over the assignment, we do not need
    // to bind the exit label.  Doing so can defeat peephole
    // optimization.
    if (init_state == CONST_INIT || slot->type() == Slot::CONTEXT) {
      exit.Bind();
    }
  }
}


void CodeGenerator::LoadFromGlobalSlotCheckExtensions(Slot* slot,
                                                      TypeofState typeof_state,
                                                      JumpTarget* slow) {
  // Check that no extension objects have been created by calls to
  // eval from the current scope to the global scope.
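  // (A call to eval can introduce new bindings, e.g. eval("var x = 1"),
  // which live in an extension object attached to the context; any such
  // extension invalidates the fast path and sends us to the slow case.)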
  Register tmp = frame_->scratch0();
  Register tmp2 = frame_->scratch1();
  Register context = cp;
  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        frame_->SpillAll();
        // Check that extension is NULL.
        __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX));
        __ tst(tmp2, tmp2);
        slow->Branch(ne);
      }
      // Load next context in chain.
      __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
      __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
      context = tmp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    frame_->SpillAll();
    Label next, fast;
    __ Move(tmp, context);
    __ bind(&next);
    // Terminate at global context.
    __ ldr(tmp2, FieldMemOperand(tmp, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
    __ cmp(tmp2, ip);
    __ b(eq, &fast);
    // Check that extension is NULL.
    __ ldr(tmp2, ContextOperand(tmp, Context::EXTENSION_INDEX));
    __ tst(tmp2, tmp2);
    slow->Branch(ne);
    // Load next context in chain.
    __ ldr(tmp, ContextOperand(tmp, Context::CLOSURE_INDEX));
    __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
    __ b(&next);
    __ bind(&fast);
  }

  // Load the global object.
  LoadGlobal();
  // Set up the name register and call the load IC.
  frame_->CallLoadIC(slot->var()->name(),
                     typeof_state == INSIDE_TYPEOF
                         ? RelocInfo::CODE_TARGET
                         : RelocInfo::CODE_TARGET_CONTEXT);
}


void CodeGenerator::EmitDynamicLoadFromSlotFastCase(Slot* slot,
                                                    TypeofState typeof_state,
                                                    JumpTarget* slow,
                                                    JumpTarget* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
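  //
  // Roughly: DYNAMIC_GLOBAL covers variables that resolve to a global
  // unless an eval in scope has shadowed them; DYNAMIC_LOCAL covers
  // variables that resolve to a known local or argument slot unless
  // shadowed the same way.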
  if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
    LoadFromGlobalSlotCheckExtensions(slot, typeof_state, slow);
    frame_->SpillAll();
    done->Jump();

  } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
    frame_->SpillAll();
    Slot* potential_slot = slot->var()->local_if_not_shadowed()->slot();
    Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
    if (potential_slot != NULL) {
      // Generate fast case for locals that rewrite to slots.
      __ ldr(r0,
             ContextSlotOperandCheckExtensions(potential_slot,
                                               r1,
                                               r2,
                                               slow));
      if (potential_slot->var()->mode() == Variable::CONST) {
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ cmp(r0, ip);
        __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
      }
      done->Jump();
    } else if (rewrite != NULL) {
      // Generate fast case for argument loads.
      Property* property = rewrite->AsProperty();
      if (property != NULL) {
        VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
        Literal* key_literal = property->key()->AsLiteral();
        if (obj_proxy != NULL &&
            key_literal != NULL &&
            obj_proxy->IsArguments() &&
            key_literal->handle()->IsSmi()) {
          // Load arguments object if there are no eval-introduced
          // variables.  Then load the argument from the arguments
          // object using keyed load.
          __ ldr(r0,
                 ContextSlotOperandCheckExtensions(obj_proxy->var()->slot(),
                                                   r1,
                                                   r2,
                                                   slow));
          frame_->EmitPush(r0);
          __ mov(r1, Operand(key_literal->handle()));
          frame_->EmitPush(r1);
          EmitKeyedLoad();
          done->Jump();
        }
      }
    }
  }
}


void CodeGenerator::VisitSlot(Slot* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Slot");
  LoadFromSlotCheckForArguments(node, NOT_INSIDE_TYPEOF);
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitVariableProxy(VariableProxy* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ VariableProxy");

  Variable* var = node->var();
  Expression* expr = var->rewrite();
  if (expr != NULL) {
    Visit(expr);
  } else {
    ASSERT(var->is_global());
    Reference ref(this, node);
    ref.GetValue();
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitLiteral(Literal* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Literal");
  Register reg = frame_->GetTOSRegister();
  bool is_smi = node->handle()->IsSmi();
  __ mov(reg, Operand(node->handle()));
  frame_->EmitPush(reg, is_smi ? TypeInfo::Smi() : TypeInfo::Unknown());
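  // Recording TypeInfo::Smi() for smi literals lets the code generator
  // choose inline smi code paths for later operations on this value.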
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ RegExp Literal");

  Register tmp = VirtualFrame::scratch0();
  // Free up a TOS register that can be used to push the literal.
  Register literal = frame_->GetTOSRegister();

  // Retrieve the literal array and check the allocated entry.

  // Load the function of this activation.
  __ ldr(tmp, frame_->Function());

  // Load the literals array of the function.
  __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kLiteralsOffset));

  // Load the literal at the ast saved index.
  int literal_offset =
      FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
  __ ldr(literal, FieldMemOperand(tmp, literal_offset));

  JumpTarget done;
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(literal, ip);
  // This branch locks the virtual frame at the done label to match the
  // one we have here, where the literal register is not on the stack and
  // nothing is spilled.
  done.Branch(ne);

  // If the entry is undefined we call the runtime system to compute
  // the literal.
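  // (Materialization happens at most once per literal site: for
  //   function f() { return /ab+c/; }
  // the JSRegExp is allocated on the first call to f and is found in the
  // literals array on later calls.)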
  // literal array (0)
  frame_->EmitPush(tmp);
  // literal index (1)
  frame_->EmitPush(Operand(Smi::FromInt(node->literal_index())));
  // RegExp pattern (2)
  frame_->EmitPush(Operand(node->pattern()));
  // RegExp flags (3)
  frame_->EmitPush(Operand(node->flags()));
  frame_->CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ Move(literal, r0);

  // This call to bind will get us back to the virtual frame we had before
  // where things are not spilled and the literal register is not on the stack.
  done.Bind();
  // Push the literal.
  frame_->EmitPush(literal);
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ ObjectLiteral");

  Register literal = frame_->GetTOSRegister();
  // Load the function of this activation.
  __ ldr(literal, frame_->Function());
  // Literal array.
  __ ldr(literal, FieldMemOperand(literal, JSFunction::kLiteralsOffset));
  frame_->EmitPush(literal);
  // Literal index.
  frame_->EmitPush(Operand(Smi::FromInt(node->literal_index())));
  // Constant properties.
  frame_->EmitPush(Operand(node->constant_properties()));
  // Should the object literal have fast elements?
  frame_->EmitPush(Operand(Smi::FromInt(node->fast_elements() ? 1 : 0)));
  if (node->depth() > 1) {
    frame_->CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    frame_->CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
  }
  frame_->EmitPush(r0);  // save the result
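
  // For a literal such as
  //   { a: 1, b: x, get c() { ... }, __proto__: p }
  // 'a: 1' is a CONSTANT property already present in the boilerplate,
  // 'b: x' is COMPUTED, the getter is GETTER, and '__proto__: p' is
  // PROTOTYPE; the switch below emits the matching store for each kind.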
  for (int i = 0; i < node->properties()->length(); i++) {
    // At the start of each iteration, the top of stack contains
    // the newly created object literal.
    ObjectLiteral::Property* property = node->properties()->at(i);
    Literal* key = property->key();
    Expression* value = property->value();
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        break;
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        if (CompileTimeValue::IsCompileTimeValue(property->value())) break;
        // else fall through
      case ObjectLiteral::Property::COMPUTED:
        if (key->handle()->IsSymbol()) {
          Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
          Load(value);
          frame_->PopToR0();
          // Fetch the object literal.
          frame_->SpillAllButCopyTOSToR1();
          __ mov(r2, Operand(key->handle()));
          frame_->CallCodeObject(ic, RelocInfo::CODE_TARGET, 0);
          break;
        }
        // else fall through
      case ObjectLiteral::Property::PROTOTYPE: {
        frame_->Dup();
        Load(key);
        Load(value);
        frame_->CallRuntime(Runtime::kSetProperty, 3);
        break;
      }
      case ObjectLiteral::Property::SETTER: {
        frame_->Dup();
        Load(key);
        frame_->EmitPush(Operand(Smi::FromInt(1)));
        Load(value);
        frame_->CallRuntime(Runtime::kDefineAccessor, 4);
        break;
      }
      case ObjectLiteral::Property::GETTER: {
        frame_->Dup();
        Load(key);
        frame_->EmitPush(Operand(Smi::FromInt(0)));
        Load(value);
        frame_->CallRuntime(Runtime::kDefineAccessor, 4);
        break;
      }
    }
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ ArrayLiteral");

  Register tos = frame_->GetTOSRegister();
  // Load the function of this activation.
  __ ldr(tos, frame_->Function());
  // Load the literals array of the function.
  __ ldr(tos, FieldMemOperand(tos, JSFunction::kLiteralsOffset));
  frame_->EmitPush(tos);
  frame_->EmitPush(Operand(Smi::FromInt(node->literal_index())));
  frame_->EmitPush(Operand(node->constant_elements()));
  int length = node->values()->length();
  if (node->depth() > 1) {
    frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
  } else if (length > FastCloneShallowArrayStub::kMaximumLength) {
    frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
  } else {
    FastCloneShallowArrayStub stub(length);
    frame_->CallStub(&stub, 3);
  }
  frame_->EmitPush(r0);  // save the result
  // r0: created object literal

  // Generate code to set the elements in the array that are not
  // literals.
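  // For example, in [1, 2, f()] the first two elements arrive with the
  // (shallow) clone of the boilerplate; only the f() element is
  // evaluated and written here, followed by the write barrier below.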
  for (int i = 0; i < node->values()->length(); i++) {
    Expression* value = node->values()->at(i);

    // If value is a literal the property value is already set in the
    // boilerplate object.
    if (value->AsLiteral() != NULL) continue;
    // If value is a materialized literal the property value is already set
    // in the boilerplate object if it is simple.
    if (CompileTimeValue::IsCompileTimeValue(value)) continue;

    // The property must be set by generated code.
    Load(value);
    frame_->PopToR0();
    // Fetch the object literal.
    frame_->SpillAllButCopyTOSToR1();

    // Get the elements array.
    __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));

    // Write to the indexed properties array.
    int offset = i * kPointerSize + FixedArray::kHeaderSize;
    __ str(r0, FieldMemOperand(r1, offset));

    // Update the write barrier for the array address.
    __ RecordWrite(r1, Operand(offset), r3, r2);
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  // Call runtime routine to allocate the catch extension object and
  // assign the exception value to the catch variable.
  Comment cmnt(masm_, "[ CatchExtensionObject");
  Load(node->key());
  Load(node->value());
  frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2);
  frame_->EmitPush(r0);
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::EmitSlotAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm(), "[ Variable Assignment");
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  ASSERT(var != NULL);
  Slot* slot = var->slot();
  ASSERT(slot != NULL);

  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
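    // E.g. for 'x += y' the current value of x is loaded here and
    // combined with y below, while a plain 'x = y' skips straight to
    // loading y.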
    LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);

    // Perform the binary operation.
    Literal* literal = node->value()->AsLiteral();
    bool overwrite_value =
        (node->value()->AsBinaryOperation() != NULL &&
         node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
    if (literal != NULL && literal->handle()->IsSmi()) {
      SmiOperation(node->binary_op(),
                   literal->handle(),
                   false,
                   overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
    } else {
      GenerateInlineSmi inline_smi =
          loop_nesting() > 0 ? GENERATE_INLINE_SMI : DONT_GENERATE_INLINE_SMI;
      if (literal != NULL) {
        ASSERT(!literal->handle()->IsSmi());
        inline_smi = DONT_GENERATE_INLINE_SMI;
      }
      Load(node->value());
      GenericBinaryOperation(node->binary_op(),
                             overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE,
                             inline_smi);
    }
  } else {
    Load(node->value());
  }

  // Perform the assignment.
  if (var->mode() != Variable::CONST || node->op() == Token::INIT_CONST) {
    CodeForSourcePosition(node->position());
    StoreToSlot(slot,
                node->op() == Token::INIT_CONST ? CONST_INIT : NOT_CONST_INIT);
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::EmitNamedPropertyAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm(), "[ Named Property Assignment");
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  Property* prop = node->target()->AsProperty();
  ASSERT(var == NULL || (prop == NULL && var->is_global()));

  // Initialize name and evaluate the receiver sub-expression if necessary.  If
  // the receiver is trivial it is not placed on the stack at this point, but
  // loaded whenever actually needed.
  Handle<String> name;
  bool is_trivial_receiver = false;
  if (var != NULL) {
    name = var->name();
  } else {
    Literal* lit = prop->key()->AsLiteral();
    ASSERT_NOT_NULL(lit);
    name = Handle<String>::cast(lit->handle());
    // Do not materialize the receiver on the frame if it is trivial.
    is_trivial_receiver = prop->obj()->IsTrivial();
    if (!is_trivial_receiver) Load(prop->obj());
  }

  // Change to slow case in the beginning of an initialization block to
  // avoid the quadratic behavior of repeatedly adding fast properties.
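  // (An initialization block is, roughly, a parser-detected run of
  // consecutive assignments to properties of the same object, such as
  // 'this.a = ...; this.b = ...;' in a constructor.)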
  if (node->starts_initialization_block()) {
    // An initialization block consists of assignments of the form
    // expr.x = ..., so this will never be an assignment to a variable and
    // there must be a receiver object.
    ASSERT_EQ(NULL, var);
    if (is_trivial_receiver) {
      Load(prop->obj());
    } else {
      frame_->Dup();
    }
    frame_->CallRuntime(Runtime::kToSlowProperties, 1);
  }

  // Change to fast case at the end of an initialization block.  To prepare for
  // that add an extra copy of the receiver to the frame, so that it can be
  // converted back to fast case after the assignment.
  if (node->ends_initialization_block() && !is_trivial_receiver) {
    frame_->Dup();
  }

  // Stack layout:
  // [tos]   : receiver (only materialized if non-trivial)
  // [tos+1] : receiver if at the end of an initialization block

  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
    if (is_trivial_receiver) {
      Load(prop->obj());
    } else if (var != NULL) {
      LoadGlobal();
    } else {
      frame_->Dup();
    }
    EmitNamedLoad(name, var != NULL);

    // Perform the binary operation.
    Literal* literal = node->value()->AsLiteral();
    bool overwrite_value =
        (node->value()->AsBinaryOperation() != NULL &&
         node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
    if (literal != NULL && literal->handle()->IsSmi()) {
      SmiOperation(node->binary_op(),
                   literal->handle(),
                   false,
                   overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
    } else {
      GenerateInlineSmi inline_smi =
          loop_nesting() > 0 ? GENERATE_INLINE_SMI : DONT_GENERATE_INLINE_SMI;
      if (literal != NULL) {
        ASSERT(!literal->handle()->IsSmi());
        inline_smi = DONT_GENERATE_INLINE_SMI;
      }
      Load(node->value());
      GenericBinaryOperation(node->binary_op(),
                             overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE,
                             inline_smi);
    }
  } else {
    // For non-compound assignment just load the right-hand side.
    Load(node->value());
  }

  // Stack layout:
  // [tos]   : value
  // [tos+1] : receiver (only materialized if non-trivial)
  // [tos+2] : receiver if at the end of an initialization block

  // Perform the assignment.  It is safe to ignore constants here.
  ASSERT(var == NULL || var->mode() != Variable::CONST);
  ASSERT_NE(Token::INIT_CONST, node->op());
  if (is_trivial_receiver) {
    // Load the receiver and swap with the value.
    Load(prop->obj());
    Register t0 = frame_->PopToRegister();
    Register t1 = frame_->PopToRegister(t0);
    frame_->EmitPush(t0);
    frame_->EmitPush(t1);
  }
  CodeForSourcePosition(node->position());
  bool is_contextual = (var != NULL);
  EmitNamedStore(name, is_contextual);
  frame_->EmitPush(r0);

  // Change to fast case at the end of an initialization block.
  if (node->ends_initialization_block()) {
    ASSERT_EQ(NULL, var);
    // The argument to the runtime call is the receiver.
    if (is_trivial_receiver) {
      Load(prop->obj());
    } else {
      // A copy of the receiver is below the value of the assignment.  Swap
      // the receiver and the value of the assignment expression.
      Register t0 = frame_->PopToRegister();
      Register t1 = frame_->PopToRegister(t0);
      frame_->EmitPush(t0);
      frame_->EmitPush(t1);
    }
    frame_->CallRuntime(Runtime::kToFastProperties, 1);
  }

  // Stack layout:
  // [tos]   : result

  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::EmitKeyedPropertyAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Keyed Property Assignment");
  Property* prop = node->target()->AsProperty();
  ASSERT_NOT_NULL(prop);

  // Evaluate the receiver subexpression.
  Load(prop->obj());

  WriteBarrierCharacter wb_info;

  // Change to slow case in the beginning of an initialization block to
  // avoid the quadratic behavior of repeatedly adding fast properties.
  if (node->starts_initialization_block()) {
    frame_->Dup();
    frame_->CallRuntime(Runtime::kToSlowProperties, 1);
  }

  // Change to fast case at the end of an initialization block.  To prepare for
  // that add an extra copy of the receiver to the frame, so that it can be
  // converted back to fast case after the assignment.
  if (node->ends_initialization_block()) {
    frame_->Dup();
  }

  // Evaluate the key subexpression.
  Load(prop->key());

  // Stack layout:
  // [tos]   : key
  // [tos+1] : receiver
  // [tos+2] : receiver if at the end of an initialization block
  //
  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
    // Duplicate receiver and key for loading the current property value.
    frame_->Dup2();
    EmitKeyedLoad();
    frame_->EmitPush(r0);

    // Perform the binary operation.
    Literal* literal = node->value()->AsLiteral();
    bool overwrite_value =
        (node->value()->AsBinaryOperation() != NULL &&
         node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
    if (literal != NULL && literal->handle()->IsSmi()) {
      SmiOperation(node->binary_op(),
                   literal->handle(),
                   false,
                   overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
    } else {
      GenerateInlineSmi inline_smi =
          loop_nesting() > 0 ? GENERATE_INLINE_SMI : DONT_GENERATE_INLINE_SMI;
      if (literal != NULL) {
        ASSERT(!literal->handle()->IsSmi());
        inline_smi = DONT_GENERATE_INLINE_SMI;
      }
      Load(node->value());
      GenericBinaryOperation(node->binary_op(),
                             overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE,
                             inline_smi);
    }
    wb_info = node->type()->IsLikelySmi() ? LIKELY_SMI : UNLIKELY_SMI;
  } else {
    // For non-compound assignment just load the right-hand side.
    Load(node->value());
    wb_info = node->value()->AsLiteral() != NULL ?
        NEVER_NEWSPACE :
        (node->value()->type()->IsLikelySmi() ? LIKELY_SMI : UNLIKELY_SMI);
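    // The hint is consumed by EmitKeyedStore when it emits the write
    // barrier: a literal right-hand side is never a new-space object, and
    // a likely-smi value will usually not need the barrier at all.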
  }

  // Stack layout:
  // [tos]   : value
  // [tos+1] : key
  // [tos+2] : receiver
  // [tos+3] : receiver if at the end of an initialization block

  // Perform the assignment.  It is safe to ignore constants here.
  ASSERT(node->op() != Token::INIT_CONST);
  CodeForSourcePosition(node->position());
  EmitKeyedStore(prop->key()->type(), wb_info);
  frame_->EmitPush(r0);

  // Stack layout:
  // [tos]   : result
  // [tos+1] : receiver if at the end of an initialization block

  // Change to fast case at the end of an initialization block.
  if (node->ends_initialization_block()) {
    // The argument to the runtime call is the extra copy of the receiver,
    // which is below the value of the assignment.  Swap the receiver and
    // the value of the assignment expression.
    Register t0 = frame_->PopToRegister();
    Register t1 = frame_->PopToRegister(t0);
    frame_->EmitPush(t1);
    frame_->EmitPush(t0);
    frame_->CallRuntime(Runtime::kToFastProperties, 1);
  }

  // Stack layout:
  // [tos]   : result

  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitAssignment(Assignment* node) {
  VirtualFrame::RegisterAllocationScope scope(this);
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Assignment");

  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  Property* prop = node->target()->AsProperty();

  if (var != NULL && !var->is_global()) {
    EmitSlotAssignment(node);

  } else if ((prop != NULL && prop->key()->IsPropertyName()) ||
             (var != NULL && var->is_global())) {
    // Properties whose keys are property names and global variables are
    // treated as named property references.  We do not need to consider
    // global 'this' because it is not a valid left-hand side.
    EmitNamedPropertyAssignment(node);

  } else if (prop != NULL) {
    // Other properties (including rewritten parameters for a function that
    // uses arguments) are keyed property assignments.
    EmitKeyedPropertyAssignment(node);

  } else {
    // Invalid left-hand side.
    Load(node->target());
    frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
    // The runtime call doesn't actually return but the code generator will
    // still generate code and expects a certain frame height.
    frame_->EmitPush(r0);
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitThrow(Throw* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Throw");

  Load(node->exception());
  CodeForSourcePosition(node->position());
  frame_->CallRuntime(Runtime::kThrow, 1);
  frame_->EmitPush(r0);
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitProperty(Property* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Property");

  { Reference property(this, node);
    property.GetValue();
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}
3820
3821
3822void CodeGenerator::VisitCall(Call* node) {
3823#ifdef DEBUG
3824 int original_height = frame_->height();
3825#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003826 Comment cmnt(masm_, "[ Call");
3827
3828 Expression* function = node->expression();
3829 ZoneList<Expression*>* args = node->arguments();
3830
  // Standard function call.
  // Check if the function is a variable or a property.
  Variable* var = function->AsVariableProxy()->AsVariable();
  Property* property = function->AsProperty();

  // ------------------------------------------------------------------------
  // Fast-case: Use inline caching.
  // ---
  // According to ECMA-262, section 11.2.3, page 44, the function to call
  // must be resolved after the arguments have been evaluated. The IC code
  // automatically handles this by loading the arguments before the function
  // is resolved in cache misses (this also holds for megamorphic calls).
  // ------------------------------------------------------------------------

  if (var != NULL && var->is_possibly_eval()) {
    // ----------------------------------
    // JavaScript example: 'eval(arg)'  // eval is not known to be shadowed
    // ----------------------------------

    // In a call to eval, we first call %ResolvePossiblyDirectEval to
    // resolve the function we need to call and the receiver of the
    // call.  Then we call the resolved function using the given
    // arguments.

    // Prepare stack for call to resolved function.
    Load(function);

    // Allocate a frame slot for the receiver.
    frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);

    // Load the arguments.
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      Load(args->at(i));
    }

    VirtualFrame::SpilledScope spilled_scope(frame_);

    // If we know that eval can only be shadowed by eval-introduced
    // variables we attempt to load the global eval function directly
    // in generated code. If we succeed, there is no need to perform a
    // context lookup in the runtime system.
    JumpTarget done;
    if (var->slot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
      ASSERT(var->slot()->type() == Slot::LOOKUP);
      JumpTarget slow;
      // Prepare the stack for the call to
      // ResolvePossiblyDirectEvalNoLookup by pushing the loaded
      // function, the first argument to the eval call and the
      // receiver.
      LoadFromGlobalSlotCheckExtensions(var->slot(),
                                        NOT_INSIDE_TYPEOF,
                                        &slow);
      frame_->EmitPush(r0);
      if (arg_count > 0) {
        __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
        frame_->EmitPush(r1);
      } else {
        frame_->EmitPush(r2);
      }
      __ ldr(r1, frame_->Receiver());
      frame_->EmitPush(r1);

      frame_->CallRuntime(Runtime::kResolvePossiblyDirectEvalNoLookup, 3);

      done.Jump();
      slow.Bind();
    }

    // Prepare the stack for the call to ResolvePossiblyDirectEval by
    // pushing the loaded function, the first argument to the eval
    // call and the receiver.
    __ ldr(r1, MemOperand(sp, arg_count * kPointerSize + kPointerSize));
    frame_->EmitPush(r1);
    if (arg_count > 0) {
      __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
      frame_->EmitPush(r1);
    } else {
      frame_->EmitPush(r2);
    }
    __ ldr(r1, frame_->Receiver());
    frame_->EmitPush(r1);

    // Resolve the call.
    frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 3);
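    // On return, the runtime call leaves the resolved function in r0 and
    // the receiver to use in r1; the touch-up code below writes them into
    // the two stack slots reserved earlier (a summary of the contract this
    // caller relies on, inferred from that touch-up code).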

    // If we generated fast-case code bind the jump-target where fast
    // and slow case merge.
    if (done.is_linked()) done.Bind();

    // Touch up stack with the right values for the function and the receiver.
    __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
    __ str(r1, MemOperand(sp, arg_count * kPointerSize));

    // Call the function.
    CodeForSourcePosition(node->position());

    InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
    CallFunctionStub call_function(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
    frame_->CallStub(&call_function, arg_count + 1);

    __ ldr(cp, frame_->Context());
    // Remove the function from the stack.
    frame_->Drop();
    frame_->EmitPush(r0);

  } else if (var != NULL && !var->is_this() && var->is_global()) {
    // ----------------------------------
    // JavaScript example: 'foo(1, 2, 3)'  // foo is global
    // ----------------------------------
    // Pass the global object as the receiver and let the IC stub
    // patch the stack to use the global proxy as 'this' in the
    // invoked function.
    LoadGlobal();

    // Load the arguments.
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      Load(args->at(i));
    }

    VirtualFrame::SpilledScope spilled_scope(frame_);
    // Set up the name register and call the IC initialization code.
    __ mov(r2, Operand(var->name()));
    InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
    Handle<Code> stub = ComputeCallInitialize(arg_count, in_loop);
    CodeForSourcePosition(node->position());
    frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET_CONTEXT,
                           arg_count + 1);
    __ ldr(cp, frame_->Context());
    frame_->EmitPush(r0);

  } else if (var != NULL && var->slot() != NULL &&
             var->slot()->type() == Slot::LOOKUP) {
    VirtualFrame::SpilledScope spilled_scope(frame_);
    // ----------------------------------
    // JavaScript examples:
    //
    //  with (obj) foo(1, 2, 3)  // foo may be in obj.
    //
    //  function f() {};
    //  function g() {
    //    eval(...);
    //    f();  // f could be in extension object.
    //  }
    // ----------------------------------

    // JumpTargets do not yet support merging frames so the frame must be
    // spilled when jumping to these targets.
    JumpTarget slow, done;

    // Generate fast case for loading functions from slots that
    // correspond to local/global variables or arguments unless they
    // are shadowed by eval-introduced bindings.
    EmitDynamicLoadFromSlotFastCase(var->slot(),
                                    NOT_INSIDE_TYPEOF,
                                    &slow,
                                    &done);

    slow.Bind();
    // Load the function.
    frame_->EmitPush(cp);
    __ mov(r0, Operand(var->name()));
    frame_->EmitPush(r0);
    frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
    // r0: slot value; r1: receiver

    // Load the receiver.
    frame_->EmitPush(r0);  // function
    frame_->EmitPush(r1);  // receiver

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      JumpTarget call;
      call.Jump();
      done.Bind();
      frame_->EmitPush(r0);  // function
      LoadGlobalReceiver(r1);  // receiver
      call.Bind();
    }

    // Call the function. At this point, everything is spilled but the
    // function and receiver are in r0 and r1.
    CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
    frame_->EmitPush(r0);

  } else if (property != NULL) {
    // Check if the key is a literal string.
    Literal* literal = property->key()->AsLiteral();

    if (literal != NULL && literal->handle()->IsSymbol()) {
      // ------------------------------------------------------------------
      // JavaScript example: 'object.foo(1, 2, 3)' or 'map["key"](1, 2, 3)'
      // ------------------------------------------------------------------

      Handle<String> name = Handle<String>::cast(literal->handle());

      if (ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION &&
          name->IsEqualTo(CStrVector("apply")) &&
          args->length() == 2 &&
          args->at(1)->AsVariableProxy() != NULL &&
          args->at(1)->AsVariableProxy()->IsArguments()) {
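        // Note: this is a purely syntactic pattern match; it fires only
        // for a call written literally as f.apply(x, arguments) inside a
        // function whose arguments object has not been materialized yet
        // (LAZY_ARGUMENTS_ALLOCATION), so the generated code can read the
        // actual arguments straight off the stack.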
        // Use the optimized Function.prototype.apply that avoids
        // allocating lazily allocated arguments objects.
        CallApplyLazy(property->obj(),
                      args->at(0),
                      args->at(1)->AsVariableProxy(),
                      node->position());

      } else {
        Load(property->obj());  // Receiver.
        // Load the arguments.
        int arg_count = args->length();
        for (int i = 0; i < arg_count; i++) {
          Load(args->at(i));
        }

        VirtualFrame::SpilledScope spilled_scope(frame_);
        // Set the name register and call the IC initialization code.
        __ mov(r2, Operand(name));
        InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
        Handle<Code> stub = ComputeCallInitialize(arg_count, in_loop);
        CodeForSourcePosition(node->position());
        frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
        __ ldr(cp, frame_->Context());
        frame_->EmitPush(r0);
      }

    } else {
      // -------------------------------------------
      // JavaScript example: 'array[index](1, 2, 3)'
      // -------------------------------------------
      VirtualFrame::SpilledScope spilled_scope(frame_);

      Load(property->obj());
      if (property->is_synthetic()) {
        Load(property->key());
        EmitKeyedLoad();
        // Put the function below the receiver.
        // Use the global receiver.
        frame_->EmitPush(r0);  // Function.
        LoadGlobalReceiver(r0);
        // Call the function.
        CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position());
        frame_->EmitPush(r0);
      } else {
        // Load the arguments.
        int arg_count = args->length();
        for (int i = 0; i < arg_count; i++) {
          Load(args->at(i));
        }

        // Set the name register and call the IC initialization code.
        Load(property->key());
        frame_->EmitPop(r2);  // Function name.

        InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
        Handle<Code> stub = ComputeKeyedCallInitialize(arg_count, in_loop);
        CodeForSourcePosition(node->position());
        frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
        __ ldr(cp, frame_->Context());
        frame_->EmitPush(r0);
      }
    }

  } else {
    // ----------------------------------
    // JavaScript example: 'foo(1, 2, 3)'  // foo is not global
    // ----------------------------------

    // Load the function.
    Load(function);

    VirtualFrame::SpilledScope spilled_scope(frame_);

    // Pass the global proxy as the receiver.
    LoadGlobalReceiver(r0);

    // Call the function.
    CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
    frame_->EmitPush(r0);
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitCallNew(CallNew* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ CallNew");

  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments. This is different from ordinary calls, where the
  // actual function to call is resolved after the arguments have been
  // evaluated.

  // Compute function to call and use the global object as the
  // receiver. There is no need to use the global proxy here because
  // it will always be replaced with a newly allocated object.
  Load(node->expression());
  LoadGlobal();

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = node->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    Load(args->at(i));
  }

  VirtualFrame::SpilledScope spilled_scope(frame_);

  // r0: the number of arguments.
  __ mov(r0, Operand(arg_count));
  // Load the function into r1 as per calling convention.
  __ ldr(r1, frame_->ElementAt(arg_count + 1));

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  CodeForSourcePosition(node->position());
  Handle<Code> ic(Builtins::builtin(Builtins::JSConstructCall));
  frame_->CallCodeObject(ic, RelocInfo::CONSTRUCT_CALL, arg_count + 1);

  // Discard old TOS value and push r0 on the stack (same as Pop(), push(r0)).
  __ str(r0, frame_->Top());
  ASSERT_EQ(original_height + 1, frame_->height());
}


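// Implements the %_ClassOf intrinsic, i.e. the [[Class]] name used by
// Object.prototype.toString: smis and other non-JS objects yield null,
// JS functions yield 'Function', and objects whose map has a non-function
// constructor yield 'Object' (a summary of the checks emitted below).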
void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
  VirtualFrame::SpilledScope spilled_scope(frame_);
  ASSERT(args->length() == 1);
  JumpTarget leave, null, function, non_function_constructor;

  // Load the object into r0.
  Load(args->at(0));
  frame_->EmitPop(r0);

  // If the object is a smi, we return null.
  __ tst(r0, Operand(kSmiTagMask));
  null.Branch(eq);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  __ CompareObjectType(r0, r0, r1, FIRST_JS_OBJECT_TYPE);
  null.Branch(lt);

  // As long as JS_FUNCTION_TYPE is the last instance type and it is
  // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
  // LAST_JS_OBJECT_TYPE.
  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
  ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
  __ cmp(r1, Operand(JS_FUNCTION_TYPE));
  function.Branch(eq);

  // Check if the constructor in the map is a function.
  __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset));
  __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
  non_function_constructor.Branch(ne);

  // The r0 register now contains the constructor function. Grab the
  // instance class name from there.
  __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
  frame_->EmitPush(r0);
  leave.Jump();

  // Functions have class 'Function'.
  function.Bind();
  __ mov(r0, Operand(Factory::function_class_symbol()));
  frame_->EmitPush(r0);
  leave.Jump();

  // Objects with a non-function constructor have class 'Object'.
  non_function_constructor.Bind();
  __ mov(r0, Operand(Factory::Object_symbol()));
  frame_->EmitPush(r0);
  leave.Jump();

  // Non-JS objects have class null.
  null.Bind();
  __ LoadRoot(r0, Heap::kNullValueRootIndex);
  frame_->EmitPush(r0);

  // All done.
  leave.Bind();
}


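// Implements %_ValueOf: for a JSValue wrapper (e.g. the result of
// new Number(42)) this pushes the wrapped primitive; smis and
// non-JSValue heap objects are pushed back unchanged.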
4223void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
Steve Block6ded16b2010-05-10 14:33:55 +01004224 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00004225 ASSERT(args->length() == 1);
4226 JumpTarget leave;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004227 Load(args->at(0));
Steve Blocka7e24c12009-10-30 11:49:00 +00004228 frame_->EmitPop(r0); // r0 contains object.
4229 // if (object->IsSmi()) return the object.
4230 __ tst(r0, Operand(kSmiTagMask));
4231 leave.Branch(eq);
4232 // It is a heap object - get map. If (!object->IsJSValue()) return the object.
4233 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
4234 leave.Branch(ne);
4235 // Load the value.
4236 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset));
4237 leave.Bind();
4238 frame_->EmitPush(r0);
4239}
4240
4241
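// Implements %_SetValueOf: stores the value into a JSValue wrapper's
// value field (updating the write barrier) and pushes the value as the
// result; if the first argument is not a JSValue wrapper the store is
// skipped and the value is returned unchanged.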
void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* args) {
  VirtualFrame::SpilledScope spilled_scope(frame_);
  ASSERT(args->length() == 2);
  JumpTarget leave;
  Load(args->at(0));  // Load the object.
  Load(args->at(1));  // Load the value.
  frame_->EmitPop(r0);  // r0 contains value.
  frame_->EmitPop(r1);  // r1 contains object.
  // if (object->IsSmi()) return object.
  __ tst(r1, Operand(kSmiTagMask));
  leave.Branch(eq);
  // It is a heap object - get map. If (!object->IsJSValue()) return the object.
  __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
  leave.Branch(ne);
  // Store the value.
  __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
  // Update the write barrier.
  __ RecordWrite(r1, Operand(JSValue::kValueOffset - kHeapObjectTag), r2, r3);
  // Leave.
  leave.Bind();
  frame_->EmitPush(r0);
}


void CodeGenerator::GenerateIsSmi(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Register reg = frame_->PopToRegister();
  __ tst(reg, Operand(kSmiTagMask));
  cc_reg_ = eq;
}


void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) {
  // See comment in CodeGenerator::GenerateLog in codegen-ia32.cc.
  ASSERT_EQ(args->length(), 3);
#ifdef ENABLE_LOGGING_AND_PROFILING
  if (ShouldGenerateLog(args->at(0))) {
    Load(args->at(1));
    Load(args->at(2));
    frame_->CallRuntime(Runtime::kLog, 2);
  }
#endif
  frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
}


void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Register reg = frame_->PopToRegister();
  __ tst(reg, Operand(kSmiTagMask | 0x80000000u));
  cc_reg_ = eq;
}


// Generates the Math.pow method.
void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);
  Load(args->at(0));
  Load(args->at(1));

  if (!CpuFeatures::IsSupported(VFP3)) {
    frame_->CallRuntime(Runtime::kMath_pow, 2);
    frame_->EmitPush(r0);
  } else {
    CpuFeatures::Scope scope(VFP3);
    JumpTarget runtime, done;
    Label exponent_nonsmi, base_nonsmi, powi, not_minus_half, allocate_return;

    Register scratch1 = VirtualFrame::scratch0();
    Register scratch2 = VirtualFrame::scratch1();

    // Get base and exponent to registers.
    Register exponent = frame_->PopToRegister();
    Register base = frame_->PopToRegister(exponent);
    Register heap_number_map = no_reg;

    // Set the frame for the runtime jump target. The code below jumps to the
    // jump target label so the frame needs to be established before that.
    ASSERT(runtime.entry_frame() == NULL);
    runtime.set_entry_frame(frame_);

    __ BranchOnNotSmi(exponent, &exponent_nonsmi);
    __ BranchOnNotSmi(base, &base_nonsmi);

    heap_number_map = r6;
    __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);

    // Exponent is a smi and base is a smi. Get the smi value of the base
    // into vfp register d1.
    __ SmiToDoubleVFPRegister(base, d1, scratch1, s0);
    __ b(&powi);

    __ bind(&base_nonsmi);
    // Exponent is a smi and base is a non-smi. Get the double value of the
    // base into vfp register d1.
    __ ObjectToDoubleVFPRegister(base, d1,
                                 scratch1, scratch2, heap_number_map, s0,
                                 runtime.entry_label());

    __ bind(&powi);

    // Load 1.0 into d0.
    __ vmov(d0, 1.0);

    // Get the absolute untagged value of the exponent and use that for the
    // calculation.
    __ mov(scratch1, Operand(exponent, ASR, kSmiTagSize), SetCC);
    __ rsb(scratch1, scratch1, Operand(0), LeaveCC, mi);  // Negate if negative.
    __ vmov(d2, d0, mi);  // 1.0 needed in d2 later if exponent is negative.

    // Run through all the bits in the exponent (exponentiation by squaring).
    // The result is accumulated in d0 while d1 holds base^(2^i) for the bit
    // currently being processed, i.e. d1 is repeatedly squared.
    Label more_bits;
    __ bind(&more_bits);
    __ mov(scratch1, Operand(scratch1, LSR, 1), SetCC);
    __ vmul(d0, d0, d1, cs);  // Multiply with base^(2^i) if bit is set.
    __ vmul(d1, d1, d1, ne);  // Don't bother calculating next d1 if done.
    __ b(ne, &more_bits);
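    // For example, with exponent 5 (binary 101): iteration 1 multiplies d0
    // by base (bit 0 set) and squares d1 to base^2; iteration 2 leaves d0
    // alone (bit 1 clear) and squares d1 to base^4; iteration 3 multiplies
    // d0 by base^4 (bit 2 set), giving base^5. This is a trace of the loop
    // above, not additional emitted code.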

    // If exponent is positive we are done.
    __ cmp(exponent, Operand(0));
    __ b(ge, &allocate_return);

    // If exponent is negative result is 1/result (d2 already holds 1.0 in that
    // case). However if d0 has reached infinity this will not provide the
    // correct result, so call runtime if that is the case.
    __ mov(scratch2, Operand(0x7FF00000));
    __ mov(scratch1, Operand(0));
    __ vmov(d1, scratch1, scratch2);  // Load infinity into d1.
    __ vcmp(d0, d1);
    __ vmrs(pc);
    runtime.Branch(eq);  // d0 reached infinity.
    __ vdiv(d0, d2, d0);
    __ b(&allocate_return);

    __ bind(&exponent_nonsmi);
    // Special handling of raising to the power of -0.5 and 0.5. First check
    // that the exponent is a heap number and that its lower mantissa bits
    // (which are zero for both of those values) are indeed zero.
    heap_number_map = r6;
    __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
    __ ldr(scratch1, FieldMemOperand(exponent, HeapObject::kMapOffset));
    __ ldr(scratch2, FieldMemOperand(exponent, HeapNumber::kMantissaOffset));
    __ cmp(scratch1, heap_number_map);
    runtime.Branch(ne);
    __ tst(scratch2, scratch2);
    runtime.Branch(ne);

    // Load the higher bits (which contain the floating point exponent).
    __ ldr(scratch1, FieldMemOperand(exponent, HeapNumber::kExponentOffset));

    // Compare exponent with -0.5.
    __ cmp(scratch1, Operand(0xbfe00000));
    __ b(ne, &not_minus_half);

    // Get the double value from the base into vfp register d0.
    __ ObjectToDoubleVFPRegister(base, d0,
                                 scratch1, scratch2, heap_number_map, s0,
                                 runtime.entry_label(),
                                 AVOID_NANS_AND_INFINITIES);

    // Load 1.0 into d2.
    __ vmov(d2, 1.0);

    // Calculate the reciprocal of the square root. 1/sqrt(x) = sqrt(1/x).
    __ vdiv(d0, d2, d0);
    __ vsqrt(d0, d0);

    __ b(&allocate_return);

    __ bind(&not_minus_half);
    // Compare exponent with 0.5.
    __ cmp(scratch1, Operand(0x3fe00000));
    runtime.Branch(ne);

    // Get the double value from the base into vfp register d0.
    __ ObjectToDoubleVFPRegister(base, d0,
                                 scratch1, scratch2, heap_number_map, s0,
                                 runtime.entry_label(),
                                 AVOID_NANS_AND_INFINITIES);
    __ vsqrt(d0, d0);

    __ bind(&allocate_return);
    Register scratch3 = r5;
    __ AllocateHeapNumberWithValue(scratch3, d0, scratch1, scratch2,
                                   heap_number_map, runtime.entry_label());
    __ mov(base, scratch3);
    done.Jump();

    runtime.Bind();

    // Push back the arguments again for the runtime call.
    frame_->EmitPush(base);
    frame_->EmitPush(exponent);
    frame_->CallRuntime(Runtime::kMath_pow, 2);
    __ Move(base, r0);

    done.Bind();
    frame_->EmitPush(base);
  }
}


// Generates the Math.sqrt method.
void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));

  if (!CpuFeatures::IsSupported(VFP3)) {
    frame_->CallRuntime(Runtime::kMath_sqrt, 1);
    frame_->EmitPush(r0);
  } else {
    CpuFeatures::Scope scope(VFP3);
    JumpTarget runtime, done;

    Register scratch1 = VirtualFrame::scratch0();
    Register scratch2 = VirtualFrame::scratch1();

    // Get the value from the frame.
    Register tos = frame_->PopToRegister();

    // Set the frame for the runtime jump target. The code below jumps to the
    // jump target label so the frame needs to be established before that.
    ASSERT(runtime.entry_frame() == NULL);
    runtime.set_entry_frame(frame_);

    Register heap_number_map = r6;
    __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);

    // Get the double value from the heap number into vfp register d0.
    __ ObjectToDoubleVFPRegister(tos, d0,
                                 scratch1, scratch2, heap_number_map, s0,
                                 runtime.entry_label());

    // Calculate the square root of d0 and place result in a heap number object.
    __ vsqrt(d0, d0);
    __ AllocateHeapNumberWithValue(
        tos, d0, scratch1, scratch2, heap_number_map, runtime.entry_label());
    done.Jump();

    runtime.Bind();
    // Push back the argument again for the runtime call.
    frame_->EmitPush(tos);
    frame_->CallRuntime(Runtime::kMath_sqrt, 1);
    __ Move(tos, r0);

    done.Bind();
    frame_->EmitPush(tos);
  }
}


class DeferredStringCharCodeAt : public DeferredCode {
 public:
  DeferredStringCharCodeAt(Register object,
                           Register index,
                           Register scratch,
                           Register result)
      : result_(result),
        char_code_at_generator_(object,
                                index,
                                scratch,
                                result,
                                &need_conversion_,
                                &need_conversion_,
                                &index_out_of_range_,
                                STRING_INDEX_IS_NUMBER) {}

  StringCharCodeAtGenerator* fast_case_generator() {
    return &char_code_at_generator_;
  }

  virtual void Generate() {
    VirtualFrameRuntimeCallHelper call_helper(frame_state());
    char_code_at_generator_.GenerateSlow(masm(), call_helper);

    __ bind(&need_conversion_);
    // Move the undefined value into the result register, which will
    // trigger conversion.
    __ LoadRoot(result_, Heap::kUndefinedValueRootIndex);
    __ jmp(exit_label());

    __ bind(&index_out_of_range_);
    // When the index is out of range, the spec requires us to return
    // NaN.
    __ LoadRoot(result_, Heap::kNanValueRootIndex);
    __ jmp(exit_label());
  }

 private:
  Register result_;

  Label need_conversion_;
  Label index_out_of_range_;

  StringCharCodeAtGenerator char_code_at_generator_;
};
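// Note: DeferredCode objects like the one above emit their Generate() body
// out of line; the main code path contains only the fast case plus a branch
// into the deferred section, which jumps back via exit_label() when done
// (a general property of the DeferredCode framework as used here).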


// This generates code that performs a String.prototype.charCodeAt() call
// or returns a smi in order to trigger conversion.
void CodeGenerator::GenerateStringCharCodeAt(ZoneList<Expression*>* args) {
  VirtualFrame::SpilledScope spilled_scope(frame_);
  Comment(masm_, "[ GenerateStringCharCodeAt");
  ASSERT(args->length() == 2);

  Load(args->at(0));
  Load(args->at(1));

  Register index = r1;
  Register object = r2;

  frame_->EmitPop(r1);
  frame_->EmitPop(r2);

  // We need two extra registers.
  Register scratch = r3;
  Register result = r0;

  DeferredStringCharCodeAt* deferred =
      new DeferredStringCharCodeAt(object,
                                   index,
                                   scratch,
                                   result);
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->EmitPush(result);
}


class DeferredStringCharFromCode : public DeferredCode {
 public:
  DeferredStringCharFromCode(Register code,
                             Register result)
      : char_from_code_generator_(code, result) {}

  StringCharFromCodeGenerator* fast_case_generator() {
    return &char_from_code_generator_;
  }

  virtual void Generate() {
    VirtualFrameRuntimeCallHelper call_helper(frame_state());
    char_from_code_generator_.GenerateSlow(masm(), call_helper);
  }

 private:
  StringCharFromCodeGenerator char_from_code_generator_;
};


// Generates code for creating a one-char string from a char code.
void CodeGenerator::GenerateStringCharFromCode(ZoneList<Expression*>* args) {
  VirtualFrame::SpilledScope spilled_scope(frame_);
  Comment(masm_, "[ GenerateStringCharFromCode");
  ASSERT(args->length() == 1);

  Load(args->at(0));

  Register code = r1;
  Register result = r0;

  frame_->EmitPop(code);

  DeferredStringCharFromCode* deferred = new DeferredStringCharFromCode(
      code, result);
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->EmitPush(result);
}


class DeferredStringCharAt : public DeferredCode {
 public:
  DeferredStringCharAt(Register object,
                       Register index,
                       Register scratch1,
                       Register scratch2,
                       Register result)
      : result_(result),
        char_at_generator_(object,
                           index,
                           scratch1,
                           scratch2,
                           result,
                           &need_conversion_,
                           &need_conversion_,
                           &index_out_of_range_,
                           STRING_INDEX_IS_NUMBER) {}

  StringCharAtGenerator* fast_case_generator() {
    return &char_at_generator_;
  }

  virtual void Generate() {
    VirtualFrameRuntimeCallHelper call_helper(frame_state());
    char_at_generator_.GenerateSlow(masm(), call_helper);

    __ bind(&need_conversion_);
    // Move smi zero into the result register, which will trigger
    // conversion.
    __ mov(result_, Operand(Smi::FromInt(0)));
    __ jmp(exit_label());

    __ bind(&index_out_of_range_);
    // When the index is out of range, the spec requires us to return
    // the empty string.
    __ LoadRoot(result_, Heap::kEmptyStringRootIndex);
    __ jmp(exit_label());
  }

 private:
  Register result_;

  Label need_conversion_;
  Label index_out_of_range_;

  StringCharAtGenerator char_at_generator_;
};


// This generates code that performs a String.prototype.charAt() call
// or returns a smi in order to trigger conversion.
void CodeGenerator::GenerateStringCharAt(ZoneList<Expression*>* args) {
  VirtualFrame::SpilledScope spilled_scope(frame_);
  Comment(masm_, "[ GenerateStringCharAt");
  ASSERT(args->length() == 2);

  Load(args->at(0));
  Load(args->at(1));

  Register index = r1;
  Register object = r2;

  frame_->EmitPop(r1);
  frame_->EmitPop(r2);

  // We need three extra registers.
  Register scratch1 = r3;
  Register scratch2 = r4;
  Register result = r0;

  DeferredStringCharAt* deferred =
      new DeferredStringCharAt(object,
                               index,
                               scratch1,
                               scratch2,
                               result);
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->EmitPush(result);
}


void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  JumpTarget answer;
  // We need the CC bits to come out as not_equal in the case where the
  // object is a smi. This can't be done with the usual test opcode so
  // we use XOR to get the right CC bits.
  Register possible_array = frame_->PopToRegister();
  Register scratch = VirtualFrame::scratch0();
  __ and_(scratch, possible_array, Operand(kSmiTagMask));
  __ eor(scratch, scratch, Operand(kSmiTagMask), SetCC);
  answer.Branch(ne);
  // It is a heap object - get the map. Check if the object is a JS array.
  __ CompareObjectType(possible_array, scratch, scratch, JS_ARRAY_TYPE);
  answer.Bind();
  cc_reg_ = eq;
}


void CodeGenerator::GenerateIsRegExp(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  JumpTarget answer;
  // We need the CC bits to come out as not_equal in the case where the
  // object is a smi. This can't be done with the usual test opcode so
  // we use XOR to get the right CC bits.
  Register possible_regexp = frame_->PopToRegister();
  Register scratch = VirtualFrame::scratch0();
  __ and_(scratch, possible_regexp, Operand(kSmiTagMask));
  __ eor(scratch, scratch, Operand(kSmiTagMask), SetCC);
  answer.Branch(ne);
  // It is a heap object - get the map. Check if the object is a regexp.
  __ CompareObjectType(possible_regexp, scratch, scratch, JS_REGEXP_TYPE);
  answer.Bind();
  cc_reg_ = eq;
}


void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
  // This generates a fast version of:
  // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp')
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Register possible_object = frame_->PopToRegister();
  __ tst(possible_object, Operand(kSmiTagMask));
  false_target()->Branch(eq);

  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(possible_object, ip);
  true_target()->Branch(eq);

  Register map_reg = VirtualFrame::scratch0();
  __ ldr(map_reg, FieldMemOperand(possible_object, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ ldrb(possible_object, FieldMemOperand(map_reg, Map::kBitFieldOffset));
  __ tst(possible_object, Operand(1 << Map::kIsUndetectable));
  false_target()->Branch(ne);

  __ ldrb(possible_object, FieldMemOperand(map_reg, Map::kInstanceTypeOffset));
  __ cmp(possible_object, Operand(FIRST_JS_OBJECT_TYPE));
  false_target()->Branch(lt);
  __ cmp(possible_object, Operand(LAST_JS_OBJECT_TYPE));
  cc_reg_ = le;
}


void CodeGenerator::GenerateIsSpecObject(ZoneList<Expression*>* args) {
  // This generates a fast version of:
  // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp' ||
  //  typeof(arg) == function).
  // It includes undetectable objects (as opposed to IsObject).
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Register value = frame_->PopToRegister();
  __ tst(value, Operand(kSmiTagMask));
  false_target()->Branch(eq);
  // Check that this is an object.
  __ ldr(value, FieldMemOperand(value, HeapObject::kMapOffset));
  __ ldrb(value, FieldMemOperand(value, Map::kInstanceTypeOffset));
  __ cmp(value, Operand(FIRST_JS_OBJECT_TYPE));
  cc_reg_ = ge;
}


void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) {
  // This generates a fast version of:
  // (%_ClassOf(arg) === 'Function')
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Register possible_function = frame_->PopToRegister();
  __ tst(possible_function, Operand(kSmiTagMask));
  false_target()->Branch(eq);
  Register map_reg = VirtualFrame::scratch0();
  Register scratch = VirtualFrame::scratch1();
  __ CompareObjectType(possible_function, map_reg, scratch, JS_FUNCTION_TYPE);
  cc_reg_ = eq;
}


void CodeGenerator::GenerateIsUndetectableObject(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Register possible_undetectable = frame_->PopToRegister();
  __ tst(possible_undetectable, Operand(kSmiTagMask));
  false_target()->Branch(eq);
  Register scratch = VirtualFrame::scratch0();
  __ ldr(scratch,
         FieldMemOperand(possible_undetectable, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
  __ tst(scratch, Operand(1 << Map::kIsUndetectable));
  cc_reg_ = ne;
}


void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 0);

  Register scratch0 = VirtualFrame::scratch0();
  Register scratch1 = VirtualFrame::scratch1();
  // Get the frame pointer for the calling frame.
  __ ldr(scratch0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  __ ldr(scratch1,
         MemOperand(scratch0, StandardFrameConstants::kContextOffset));
  __ cmp(scratch1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ ldr(scratch0,
         MemOperand(scratch0, StandardFrameConstants::kCallerFPOffset), eq);

  // Check the marker in the calling frame.
  __ ldr(scratch1,
         MemOperand(scratch0, StandardFrameConstants::kMarkerOffset));
  __ cmp(scratch1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
  cc_reg_ = eq;
}


void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 0);

  Register tos = frame_->GetTOSRegister();
  Register scratch0 = VirtualFrame::scratch0();
  Register scratch1 = VirtualFrame::scratch1();

  // Check if the calling frame is an arguments adaptor frame.
  __ ldr(scratch0,
         MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(scratch1,
         MemOperand(scratch0, StandardFrameConstants::kContextOffset));
  __ cmp(scratch1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Get the number of formal parameters.
  __ mov(tos, Operand(Smi::FromInt(scope()->num_parameters())), LeaveCC, ne);

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ ldr(tos,
         MemOperand(scratch0, ArgumentsAdaptorFrameConstants::kLengthOffset),
         eq);

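  // Note the conditional execution above: the mov is predicated on ne and
  // the ldr on eq from the preceding cmp, so exactly one of the two writes
  // to tos takes effect and no branch is needed.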
  frame_->EmitPush(tos);
}


void CodeGenerator::GenerateArguments(ZoneList<Expression*>* args) {
  VirtualFrame::SpilledScope spilled_scope(frame_);
  ASSERT(args->length() == 1);

  // Satisfy contract with ArgumentsAccessStub:
  // Load the key into r1 and the formal parameters count into r0.
  Load(args->at(0));
  frame_->EmitPop(r1);
  __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));

  // Call the shared stub to get to arguments[key].
  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
  frame_->CallStub(&stub, 0);
  frame_->EmitPush(r0);
}


void CodeGenerator::GenerateRandomHeapNumber(
    ZoneList<Expression*>* args) {
  VirtualFrame::SpilledScope spilled_scope(frame_);
  ASSERT(args->length() == 0);

  Label slow_allocate_heapnumber;
  Label heapnumber_allocated;

  __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
  __ AllocateHeapNumber(r4, r1, r2, r6, &slow_allocate_heapnumber);
  __ jmp(&heapnumber_allocated);

  __ bind(&slow_allocate_heapnumber);
  // Allocate a heap number.
  __ CallRuntime(Runtime::kNumberAlloc, 0);
  __ mov(r4, Operand(r0));

  __ bind(&heapnumber_allocated);

  // Convert 32 random bits in r0 to 0.(32 random bits) in a double
  // by computing:
  // (1.(20 0s)(32 random bits) x 2^20) - (1.0 x 2^20).
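  // In other words: with high word 0x41300000 and the 32 random bits in the
  // low word, the double's value is 2^20 + random/2^32, so subtracting
  // 1.0 x 2^20 leaves a uniform value in the range [0, 1).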
  if (CpuFeatures::IsSupported(VFP3)) {
    __ PrepareCallCFunction(0, r1);
    __ CallCFunction(ExternalReference::random_uint32_function(), 0);

    CpuFeatures::Scope scope(VFP3);
    // 0x41300000 is the top half of 1.0 x 2^20 as a double.
    // Create this constant using mov/orr to avoid PC relative load.
    __ mov(r1, Operand(0x41000000));
    __ orr(r1, r1, Operand(0x300000));
    // Move 0x41300000xxxxxxxx (x = random bits) to VFP.
    __ vmov(d7, r0, r1);
    // Move 0x4130000000000000 to VFP.
    __ mov(r0, Operand(0));
    __ vmov(d8, r0, r1);
    // Subtract and store the result in the heap number.
    __ vsub(d7, d7, d8);
    __ sub(r0, r4, Operand(kHeapObjectTag));
    __ vstr(d7, r0, HeapNumber::kValueOffset);
    frame_->EmitPush(r4);
  } else {
    __ mov(r0, Operand(r4));
    __ PrepareCallCFunction(1, r1);
    __ CallCFunction(
        ExternalReference::fill_heap_number_with_random_function(), 1);
    frame_->EmitPush(r0);
  }
}


void CodeGenerator::GenerateStringAdd(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  Load(args->at(0));
  Load(args->at(1));

  StringAddStub stub(NO_STRING_ADD_FLAGS);
  frame_->SpillAll();
  frame_->CallStub(&stub, 2);
  frame_->EmitPush(r0);
}


void CodeGenerator::GenerateSubString(ZoneList<Expression*>* args) {
  ASSERT_EQ(3, args->length());

  Load(args->at(0));
  Load(args->at(1));
  Load(args->at(2));

  SubStringStub stub;
  frame_->SpillAll();
  frame_->CallStub(&stub, 3);
  frame_->EmitPush(r0);
}


void CodeGenerator::GenerateStringCompare(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  Load(args->at(0));
  Load(args->at(1));

  StringCompareStub stub;
  frame_->SpillAll();
  frame_->CallStub(&stub, 2);
  frame_->EmitPush(r0);
}


void CodeGenerator::GenerateRegExpExec(ZoneList<Expression*>* args) {
  ASSERT_EQ(4, args->length());

  Load(args->at(0));
  Load(args->at(1));
  Load(args->at(2));
  Load(args->at(3));
  RegExpExecStub stub;
  frame_->SpillAll();
  frame_->CallStub(&stub, 4);
  frame_->EmitPush(r0);
}


void CodeGenerator::GenerateRegExpConstructResult(ZoneList<Expression*>* args) {
  // No stub. This code only occurs a few times in regexp.js.
  const int kMaxInlineLength = 100;
  ASSERT_EQ(3, args->length());
  Load(args->at(0));  // Size of array, smi.
  Load(args->at(1));  // "index" property value.
  Load(args->at(2));  // "input" property value.
  {
    VirtualFrame::SpilledScope spilled_scope(frame_);
    Label slowcase;
    Label done;
    __ ldr(r1, MemOperand(sp, kPointerSize * 2));
    STATIC_ASSERT(kSmiTag == 0);
    STATIC_ASSERT(kSmiTagSize == 1);
    __ tst(r1, Operand(kSmiTagMask));
    __ b(ne, &slowcase);
    __ cmp(r1, Operand(Smi::FromInt(kMaxInlineLength)));
    __ b(hi, &slowcase);
    // Smi-tagging is equivalent to multiplying by 2.
    // Allocate RegExpResult followed by FixedArray with the number of
    // elements in r5.
    // JSArray:   [Map][empty properties][Elements][Length-smi][index][input]
    // Elements:  [Map][Length][..elements..]
    // Size of JSArray with two in-object properties and the header of a
    // FixedArray.
    int objects_size =
        (JSRegExpResult::kSize + FixedArray::kHeaderSize) / kPointerSize;
    __ mov(r5, Operand(r1, LSR, kSmiTagSize + kSmiShiftSize));
    __ add(r2, r5, Operand(objects_size));
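    // r2 now holds the total allocation size in words: the untagged element
    // count in r5 plus the JSRegExpResult and FixedArray headers.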
    __ AllocateInNewSpace(
        r2,  // In: Size, in words.
        r0,  // Out: Start of allocation (tagged).
        r3,  // Scratch register.
        r4,  // Scratch register.
        &slowcase,
        static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));
    // r0: Start of allocated area, object-tagged.
    // r1: Number of elements in array, as smi.
    // r5: Number of elements, untagged.

    // Set JSArray map to global.regexp_result_map().
    // Set empty properties FixedArray.
    // Set elements to point to FixedArray allocated right after the JSArray.
    // Interleave operations for better latency.
    __ ldr(r2, ContextOperand(cp, Context::GLOBAL_INDEX));
    __ add(r3, r0, Operand(JSRegExpResult::kSize));
    __ mov(r4, Operand(Factory::empty_fixed_array()));
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset));
    __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset));
    __ ldr(r2, ContextOperand(r2, Context::REGEXP_RESULT_MAP_INDEX));
    __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
    __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset));

    // Set input, index and length fields from arguments.
    __ ldm(ia_w, sp, static_cast<RegList>(r2.bit() | r4.bit()));
    __ str(r1, FieldMemOperand(r0, JSArray::kLengthOffset));
    __ add(sp, sp, Operand(kPointerSize));
    __ str(r4, FieldMemOperand(r0, JSRegExpResult::kIndexOffset));
    __ str(r2, FieldMemOperand(r0, JSRegExpResult::kInputOffset));

    // Fill out the elements FixedArray.
    // r0: JSArray, tagged.
    // r3: FixedArray, tagged.
    // r5: Number of elements in array, untagged.

    // Set map.
    __ mov(r2, Operand(Factory::fixed_array_map()));
    __ str(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
    // Set FixedArray length.
    __ mov(r6, Operand(r5, LSL, kSmiTagSize));
    __ str(r6, FieldMemOperand(r3, FixedArray::kLengthOffset));
    // Fill contents of fixed-array with the-hole.
    __ mov(r2, Operand(Factory::the_hole_value()));
    __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    // Fill fixed array elements with hole.
    // r0: JSArray, tagged.
    // r2: the hole.
    // r3: Start of elements in FixedArray.
    // r5: Number of elements to fill.
    Label loop;
    __ tst(r5, Operand(r5));
    __ bind(&loop);
    __ b(le, &done);  // Jump if r5 is negative or zero.
    __ sub(r5, r5, Operand(1), SetCC);
    __ str(r2, MemOperand(r3, r5, LSL, kPointerSizeLog2));
    __ jmp(&loop);

    __ bind(&slowcase);
    __ CallRuntime(Runtime::kRegExpConstructResult, 3);

    __ bind(&done);
  }
  frame_->Forget(3);
  frame_->EmitPush(r0);
}


class DeferredSearchCache: public DeferredCode {
 public:
  DeferredSearchCache(Register dst, Register cache, Register key)
      : dst_(dst), cache_(cache), key_(key) {
    set_comment("[ DeferredSearchCache");
  }

  virtual void Generate();

 private:
  Register dst_, cache_, key_;
};


void DeferredSearchCache::Generate() {
  __ Push(cache_, key_);
  __ CallRuntime(Runtime::kGetFromCache, 2);
  if (!dst_.is(r0)) {
    __ mov(dst_, r0);
  }
}


void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      Top::global_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort("Attempt to use undefined cache.");
    frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
    return;
  }

  Load(args->at(1));

  VirtualFrame::SpilledScope spilled_scope(frame_);

  frame_->EmitPop(r2);

  __ ldr(r1, ContextOperand(cp, Context::GLOBAL_INDEX));
  __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalContextOffset));
  __ ldr(r1, ContextOperand(r1, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ ldr(r1, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(cache_id)));

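  // The cache is a FixedArray of (key, value) pairs with a 'finger' index
  // pointing at the most recently used pair. The inline fast path below
  // only checks the pair under the finger; any other hit, and every miss,
  // goes through the deferred runtime call (a summary of the layout
  // implied by JSFunctionResultCache::kFingerIndex).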
5127 DeferredSearchCache* deferred = new DeferredSearchCache(r0, r1, r2);
5128
5129 const int kFingerOffset =
5130 FixedArray::OffsetOfElementAt(JSFunctionResultCache::kFingerIndex);
5131 ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
5132 __ ldr(r0, FieldMemOperand(r1, kFingerOffset));
5133 // r0 now holds finger offset as a smi.
5134 __ add(r3, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
5135 // r3 now points to the start of fixed array elements.
5136 __ ldr(r0, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize, PreIndex));
5137 // Note side effect of PreIndex: r3 now points to the key of the pair.
5138 __ cmp(r2, r0);
5139 deferred->Branch(ne);
5140
5141 __ ldr(r0, MemOperand(r3, kPointerSize));
5142
5143 deferred->BindExit();
Leon Clarkee46be812010-01-19 14:06:41 +00005144 frame_->EmitPush(r0);
5145}
5146
5147
Andrei Popescu402d9372010-02-26 13:31:12 +00005148void CodeGenerator::GenerateNumberToString(ZoneList<Expression*>* args) {
5149 ASSERT_EQ(args->length(), 1);
5150
5151 // Load the argument on the stack and jump to the runtime.
5152 Load(args->at(0));
5153
Steve Block6ded16b2010-05-10 14:33:55 +01005154 NumberToStringStub stub;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005155 frame_->SpillAll();
Steve Block6ded16b2010-05-10 14:33:55 +01005156 frame_->CallStub(&stub, 1);
5157 frame_->EmitPush(r0);
5158}
5159
5160
5161class DeferredSwapElements: public DeferredCode {
5162 public:
5163 DeferredSwapElements(Register object, Register index1, Register index2)
5164 : object_(object), index1_(index1), index2_(index2) {
5165 set_comment("[ DeferredSwapElements");
5166 }
5167
5168 virtual void Generate();
5169
5170 private:
5171 Register object_, index1_, index2_;
5172};
5173
5174
5175void DeferredSwapElements::Generate() {
5176 __ push(object_);
5177 __ push(index1_);
5178 __ push(index2_);
5179 __ CallRuntime(Runtime::kSwapElements, 3);
5180}
5181
5182
5183void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) {
5184 Comment cmnt(masm_, "[ GenerateSwapElements");
5185
5186 ASSERT_EQ(3, args->length());
5187
5188 Load(args->at(0));
5189 Load(args->at(1));
5190 Load(args->at(2));
5191
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005192 VirtualFrame::SpilledScope spilled_scope(frame_);
5193
Steve Block6ded16b2010-05-10 14:33:55 +01005194 Register index2 = r2;
5195 Register index1 = r1;
5196 Register object = r0;
5197 Register tmp1 = r3;
5198 Register tmp2 = r4;
5199
5200 frame_->EmitPop(index2);
5201 frame_->EmitPop(index1);
5202 frame_->EmitPop(object);
5203
5204 DeferredSwapElements* deferred =
5205 new DeferredSwapElements(object, index1, index2);
5206
5207 // Fetch the map and check if array is in fast case.
5208 // Check that object doesn't require security checks and
5209 // has no indexed interceptor.
5210 __ CompareObjectType(object, tmp1, tmp2, FIRST_JS_OBJECT_TYPE);
5211 deferred->Branch(lt);
5212 __ ldrb(tmp2, FieldMemOperand(tmp1, Map::kBitFieldOffset));
5213 __ tst(tmp2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask));
5214 deferred->Branch(nz);
5215
5216 // Check the object's elements are in fast case.
5217 __ ldr(tmp1, FieldMemOperand(object, JSObject::kElementsOffset));
5218 __ ldr(tmp2, FieldMemOperand(tmp1, HeapObject::kMapOffset));
5219 __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
5220 __ cmp(tmp2, ip);
5221 deferred->Branch(ne);
5222
5223 // Smi-tagging is equivalent to multiplying by 2.
5224 STATIC_ASSERT(kSmiTag == 0);
5225 STATIC_ASSERT(kSmiTagSize == 1);
5226
5227 // Check that both indices are smis.
5228 __ mov(tmp2, index1);
5229 __ orr(tmp2, tmp2, index2);
5230 __ tst(tmp2, Operand(kSmiTagMask));
5231 deferred->Branch(nz);
5232
5233 // Bring the offsets into the fixed array in tmp1 into index1 and
5234 // index2.
5235 __ mov(tmp2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
5236 __ add(index1, tmp2, Operand(index1, LSL, kPointerSizeLog2 - kSmiTagSize));
5237 __ add(index2, tmp2, Operand(index2, LSL, kPointerSizeLog2 - kSmiTagSize));
5238
5239 // Swap elements.
5240 Register tmp3 = object;
5241 object = no_reg;
5242 __ ldr(tmp3, MemOperand(tmp1, index1));
5243 __ ldr(tmp2, MemOperand(tmp1, index2));
5244 __ str(tmp3, MemOperand(tmp1, index2));
5245 __ str(tmp2, MemOperand(tmp1, index1));
5246
5247 Label done;
5248 __ InNewSpace(tmp1, tmp2, eq, &done);
5249 // Possible optimization: do a check that both values are Smis
5250 // (or them and test against Smi mask.)
5251
5252 __ mov(tmp2, tmp1);
5253 RecordWriteStub recordWrite1(tmp1, index1, tmp3);
5254 __ CallStub(&recordWrite1);
5255
5256 RecordWriteStub recordWrite2(tmp2, index2, tmp3);
5257 __ CallStub(&recordWrite2);
5258
5259 __ bind(&done);
5260
5261 deferred->BindExit();
5262 __ LoadRoot(tmp1, Heap::kUndefinedValueRootIndex);
5263 frame_->EmitPush(tmp1);
5264}


void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) {
  Comment cmnt(masm_, "[ GenerateCallFunction");

  ASSERT(args->length() >= 2);

  int n_args = args->length() - 2;  // for receiver and function.
  Load(args->at(0));  // receiver
  for (int i = 0; i < n_args; i++) {
    Load(args->at(i + 1));
  }
  Load(args->at(n_args + 1));  // function
  frame_->CallJSFunction(n_args);
  frame_->EmitPush(r0);
}

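// A sketch of the dispatch below: a call such as Math.sin(x) reaches this
// generator as an inline runtime call. With VFP3 hardware floating point
// the work goes through TranscendentalCacheStub, which caches recent
// argument/result pairs; without VFP3 it falls back to the C++
// Runtime::kMath_sin. GenerateMathCos below is the same pattern with COS
// and kMath_cos.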
void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);
  Load(args->at(0));
  if (CpuFeatures::IsSupported(VFP3)) {
    TranscendentalCacheStub stub(TranscendentalCache::SIN);
    frame_->SpillAllButCopyTOSToR0();
    frame_->CallStub(&stub, 1);
  } else {
    frame_->CallRuntime(Runtime::kMath_sin, 1);
  }
  frame_->EmitPush(r0);
}


void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);
  Load(args->at(0));
  if (CpuFeatures::IsSupported(VFP3)) {
    TranscendentalCacheStub stub(TranscendentalCache::COS);
    frame_->SpillAllButCopyTOSToR0();
    frame_->CallStub(&stub, 1);
  } else {
    frame_->CallRuntime(Runtime::kMath_cos, 1);
  }
  frame_->EmitPush(r0);
}

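// This intrinsic is a raw identity test: a single cmp of the two tagged
// values. It therefore only matches JavaScript semantics for inputs with
// canonical representations (smis, unique heap objects), which is how the
// library code is presumed to use it, e.g. %_ObjectEquals(a, b).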
void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  Load(args->at(0));
  Load(args->at(1));
  Register lhs = frame_->PopToRegister();
  Register rhs = frame_->PopToRegister(lhs);
  __ cmp(lhs, rhs);
  cc_reg_ = eq;
}

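// Two call forms reach this visitor, keyed off the function == NULL test
// below (the second example name is hypothetical):
//
//   %NumberAdd(a, b)   // found in the C++ runtime table: direct call
//   %SomeJsBuiltin(x)  // not in the table: looked up on the builtins
//                      // object and invoked like a normal JS call,
//                      // which is why that object is pushed first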
void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  if (CheckForInlineRuntimeCall(node)) {
    ASSERT((has_cc() && frame_->height() == original_height) ||
           (!has_cc() && frame_->height() == original_height + 1));
    return;
  }

  ZoneList<Expression*>* args = node->arguments();
  Comment cmnt(masm_, "[ CallRuntime");
  Runtime::Function* function = node->function();

  if (function == NULL) {
    // Prepare stack for calling JS runtime function.
    // Push the builtins object found in the current global object.
    Register scratch = VirtualFrame::scratch0();
    __ ldr(scratch, GlobalObject());
    Register builtins = frame_->GetTOSRegister();
    __ ldr(builtins, FieldMemOperand(scratch, GlobalObject::kBuiltinsOffset));
    frame_->EmitPush(builtins);
  }

  // Push the arguments ("left-to-right").
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    Load(args->at(i));
  }

  VirtualFrame::SpilledScope spilled_scope(frame_);

  if (function == NULL) {
    // Call the JS runtime function.
    __ mov(r2, Operand(node->name()));
    InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
    Handle<Code> stub = ComputeCallInitialize(arg_count, in_loop);
    frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
    __ ldr(cp, frame_->Context());
    frame_->EmitPush(r0);
  } else {
    // Call the C runtime function.
    frame_->CallRuntime(function, arg_count);
    frame_->EmitPush(r0);
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}

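// A note on the overwrite mode computed in the final branch below: when
// the operand is itself the freshly allocated result of a binary
// operation (e.g. the a * b in -(a * b)), the unary stub is allowed to
// overwrite that temporary heap number in place rather than allocate a
// new one.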
void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ UnaryOperation");

  Token::Value op = node->op();

  if (op == Token::NOT) {
    LoadCondition(node->expression(), false_target(), true_target(), true);
    // LoadCondition may (and usually does) leave a test and branch to
    // be emitted by the caller. In that case, negate the condition.
    if (has_cc()) cc_reg_ = NegateCondition(cc_reg_);

  } else if (op == Token::DELETE) {
    Property* property = node->expression()->AsProperty();
    Variable* variable = node->expression()->AsVariableProxy()->AsVariable();
    if (property != NULL) {
      Load(property->obj());
      Load(property->key());
      frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, 2);
      frame_->EmitPush(r0);

    } else if (variable != NULL) {
      Slot* slot = variable->slot();
      if (variable->is_global()) {
        LoadGlobal();
        frame_->EmitPush(Operand(variable->name()));
        frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, 2);
        frame_->EmitPush(r0);

      } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
        // Look up the context holding the named variable.
        frame_->EmitPush(cp);
        frame_->EmitPush(Operand(variable->name()));
        frame_->CallRuntime(Runtime::kLookupContext, 2);
        // r0: context
        frame_->EmitPush(r0);
        frame_->EmitPush(Operand(variable->name()));
        frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, 2);
        frame_->EmitPush(r0);

      } else {
        // Default: Result of deleting non-global, not dynamically
        // introduced variables is false.
        frame_->EmitPushRoot(Heap::kFalseValueRootIndex);
      }

    } else {
      // Default: Result of deleting expressions is true.
      Load(node->expression());  // may have side-effects
      frame_->Drop();
      frame_->EmitPushRoot(Heap::kTrueValueRootIndex);
    }

  } else if (op == Token::TYPEOF) {
    // Special case for loading the typeof expression; see comment on
    // LoadTypeofExpression().
    LoadTypeofExpression(node->expression());
    frame_->CallRuntime(Runtime::kTypeof, 1);
    frame_->EmitPush(r0);  // r0 has result

  } else {
    bool can_overwrite =
        (node->expression()->AsBinaryOperation() != NULL &&
         node->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
    UnaryOverwriteMode overwrite =
        can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;

    bool no_negative_zero = node->expression()->no_negative_zero();
    Load(node->expression());
    switch (op) {
      case Token::NOT:
      case Token::DELETE:
      case Token::TYPEOF:
        UNREACHABLE();  // handled above
        break;

      case Token::SUB: {
        frame_->PopToR0();
        GenericUnaryOpStub stub(
            Token::SUB,
            overwrite,
            no_negative_zero ? kIgnoreNegativeZero : kStrictNegativeZero);
        frame_->CallStub(&stub, 0);
        frame_->EmitPush(r0);  // r0 has result
        break;
      }

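      // A worked example of the smi fast path below, assuming the 32-bit
      // smi encoding smi(v) == v << 1 with tag bit 0. For v == 5:
      //   smi(5)           = 0b...01010
      //   after mvn        = 0b...10101  (all bits inverted, tag bit set)
      //   after bic of tag = 0b...10100  = smi(-6) = smi(~5)
      // so inverting every bit and clearing the tag yields the tagged
      // complement with no untag/retag steps.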
      case Token::BIT_NOT: {
        Register tos = frame_->PopToRegister();
        JumpTarget not_smi_label;
        JumpTarget continue_label;
        // Smi check.
        __ tst(tos, Operand(kSmiTagMask));
        not_smi_label.Branch(ne);

        __ mvn(tos, Operand(tos));
        __ bic(tos, tos, Operand(kSmiTagMask));  // Bit-clear inverted smi-tag.
        frame_->EmitPush(tos);
        // The fast case is the first to jump to the continue label, so it gets
        // to decide the virtual frame layout.
        continue_label.Jump();

        not_smi_label.Bind();
        frame_->SpillAll();
        __ Move(r0, tos);
        GenericUnaryOpStub stub(Token::BIT_NOT, overwrite);
        frame_->CallStub(&stub, 0);
        frame_->EmitPush(r0);

        continue_label.Bind();
        break;
      }

      case Token::VOID:
        frame_->Drop();
        frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
        break;

      case Token::ADD: {
        Register tos = frame_->Peek();
        // Smi check.
        JumpTarget continue_label;
        __ tst(tos, Operand(kSmiTagMask));
        continue_label.Branch(eq);

        frame_->InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS, 1);
        frame_->EmitPush(r0);

        continue_label.Bind();
        break;
      }
      default:
        UNREACHABLE();
    }
  }
  ASSERT(!has_valid_frame() ||
         (has_cc() && frame_->height() == original_height) ||
         (!has_cc() && frame_->height() == original_height + 1));
}

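// The strategy below for x++ / x-- is optimistic: add or subtract the
// smi constant 1 with SetCC and branch on no overflow (vc) straight to
// the exit. On overflow the operation is reverted and the slow path
// converts the operand via Builtins::TO_NUMBER and calls
// Runtime::kNumberAdd / kNumberSub. Variables whose recorded type info
// says "always smi" skip the checks entirely (first branch below).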
void CodeGenerator::VisitCountOperation(CountOperation* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ CountOperation");
  VirtualFrame::RegisterAllocationScope scope(this);

  bool is_postfix = node->is_postfix();
  bool is_increment = node->op() == Token::INC;

  Variable* var = node->expression()->AsVariableProxy()->AsVariable();
  bool is_const = (var != NULL && var->mode() == Variable::CONST);
  bool is_slot = (var != NULL && var->mode() == Variable::VAR);

  if (!is_const && is_slot && type_info(var->slot()).IsSmi()) {
    // The type info declares that this variable is always a Smi. That
    // means it is a Smi both before and after the increment/decrement.
    // Let's make use of that to generate a very minimal count operation.
    Reference target(this, node->expression(), !is_const);
    ASSERT(!target.is_illegal());
    target.GetValue();  // Pushes the value.
    Register value = frame_->PopToRegister();
    if (is_postfix) frame_->EmitPush(value);
    if (is_increment) {
      __ add(value, value, Operand(Smi::FromInt(1)));
    } else {
      __ sub(value, value, Operand(Smi::FromInt(1)));
    }
    frame_->EmitPush(value);
    target.SetValue(NOT_CONST_INIT, LIKELY_SMI);
    if (is_postfix) frame_->Pop();
    ASSERT_EQ(original_height + 1, frame_->height());
    return;
  }

  // If it's a postfix expression and its result is not ignored and the
  // reference is non-trivial, then push a placeholder on the stack now
  // to hold the result of the expression.
  bool placeholder_pushed = false;
  if (!is_slot && is_postfix) {
    frame_->EmitPush(Operand(Smi::FromInt(0)));
    placeholder_pushed = true;
  }

  // A constant reference is not saved to, so a constant reference is not a
  // compound assignment reference.
  { Reference target(this, node->expression(), !is_const);
    if (target.is_illegal()) {
      // Spoof the virtual frame to have the expected height (one higher
      // than on entry).
      if (!placeholder_pushed) frame_->EmitPush(Operand(Smi::FromInt(0)));
      ASSERT_EQ(original_height + 1, frame_->height());
      return;
    }

    // This pushes 0, 1 or 2 words onto the stack to be used later when
    // updating the target. It also pushes the current value of the target.
    target.GetValue();

    JumpTarget slow;
    JumpTarget exit;

    Register value = frame_->PopToRegister();

    // Postfix: Store the old value as the result.
    if (placeholder_pushed) {
      frame_->SetElementAt(value, target.size());
    } else if (is_postfix) {
      frame_->EmitPush(value);
      __ mov(VirtualFrame::scratch0(), value);
      value = VirtualFrame::scratch0();
    }

    // Check for smi operand.
    __ tst(value, Operand(kSmiTagMask));
    slow.Branch(ne);

    // Perform optimistic increment/decrement.
    if (is_increment) {
      __ add(value, value, Operand(Smi::FromInt(1)), SetCC);
    } else {
      __ sub(value, value, Operand(Smi::FromInt(1)), SetCC);
    }

    // If the increment/decrement didn't overflow, we're done.
    exit.Branch(vc);

    // Revert optimistic increment/decrement.
    if (is_increment) {
      __ sub(value, value, Operand(Smi::FromInt(1)));
    } else {
      __ add(value, value, Operand(Smi::FromInt(1)));
    }

    // Slow case: convert to number. At this point the
    // value to be incremented is in the value register.
    slow.Bind();

    // Convert the operand to a number.
    frame_->EmitPush(value);

    {
      VirtualFrame::SpilledScope spilled(frame_);
      frame_->InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS, 1);

      if (is_postfix) {
        // Postfix: store to result (on the stack).
        __ str(r0, frame_->ElementAt(target.size()));
      }

      // Compute the new value.
      frame_->EmitPush(r0);
      frame_->EmitPush(Operand(Smi::FromInt(1)));
      if (is_increment) {
        frame_->CallRuntime(Runtime::kNumberAdd, 2);
      } else {
        frame_->CallRuntime(Runtime::kNumberSub, 2);
      }
    }

    __ Move(value, r0);
    // Store the new value in the target if not const.
    // At this point the answer is in the value register.
    exit.Bind();
    frame_->EmitPush(value);
    // Set the target with the result, leaving the result on
    // top of the stack. Removes the target from the stack if
    // it has a non-zero size.
    if (!is_const) target.SetValue(NOT_CONST_INIT, LIKELY_SMI);
  }

  // Postfix: Discard the new value and use the old.
  if (is_postfix) frame_->Pop();
  ASSERT_EQ(original_height + 1, frame_->height());
}

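// For reference, the short-circuit semantics being implemented (ECMA-262
// section 11.11): the result is one of the operand values, not a forced
// boolean, e.g.
//
//   0 && f()   // yields 0, f is never called
//   "" || "x"  // yields "x"
//
// hence the Dup/ToBoolean/Pop sequence below instead of a plain test.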
void CodeGenerator::GenerateLogicalBooleanOperation(BinaryOperation* node) {
  // According to ECMA-262 section 11.11, page 58, the binary logical
  // operators must yield the result of one of the two expressions
  // before any ToBoolean() conversions. This means that the value
  // produced by a && or || operator is not necessarily a boolean.

  // NOTE: If the left hand side produces a materialized value (not in
  // the CC register), we force the right hand side to do the
  // same. This is necessary because we may have to branch to the exit
  // after evaluating the left hand side (due to the shortcut
  // semantics), but the compiler must (statically) know if the result
  // of compiling the binary operation is materialized or not.
  if (node->op() == Token::AND) {
    JumpTarget is_true;
    LoadCondition(node->left(), &is_true, false_target(), false);
    if (has_valid_frame() && !has_cc()) {
      // The left-hand side result is on top of the virtual frame.
      JumpTarget pop_and_continue;
      JumpTarget exit;

      frame_->Dup();
      // Avoid popping the result if it converts to 'false' using the
      // standard ToBoolean() conversion as described in ECMA-262,
      // section 9.2, page 30.
      ToBoolean(&pop_and_continue, &exit);
      Branch(false, &exit);

      // Pop the result of evaluating the first part.
      pop_and_continue.Bind();
      frame_->Pop();

      // Evaluate right side expression.
      is_true.Bind();
      Load(node->right());

      // Exit (always with a materialized value).
      exit.Bind();
    } else if (has_cc() || is_true.is_linked()) {
      // The left-hand side is either (a) partially compiled to
      // control flow with a final branch left to emit or (b) fully
      // compiled to control flow and possibly true.
      if (has_cc()) {
        Branch(false, false_target());
      }
      is_true.Bind();
      LoadCondition(node->right(), true_target(), false_target(), false);
    } else {
      // Nothing to do.
      ASSERT(!has_valid_frame() && !has_cc() && !is_true.is_linked());
    }

  } else {
    ASSERT(node->op() == Token::OR);
    JumpTarget is_false;
    LoadCondition(node->left(), true_target(), &is_false, false);
    if (has_valid_frame() && !has_cc()) {
      // The left-hand side result is on top of the virtual frame.
      JumpTarget pop_and_continue;
      JumpTarget exit;

      frame_->Dup();
      // Avoid popping the result if it converts to 'true' using the
      // standard ToBoolean() conversion as described in ECMA-262,
      // section 9.2, page 30.
      ToBoolean(&exit, &pop_and_continue);
      Branch(true, &exit);

      // Pop the result of evaluating the first part.
      pop_and_continue.Bind();
      frame_->Pop();

      // Evaluate right side expression.
      is_false.Bind();
      Load(node->right());

      // Exit (always with a materialized value).
      exit.Bind();
    } else if (has_cc() || is_false.is_linked()) {
      // The left-hand side is either (a) partially compiled to
      // control flow with a final branch left to emit or (b) fully
      // compiled to control flow and possibly false.
      if (has_cc()) {
        Branch(true, true_target());
      }
      is_false.Bind();
      LoadCondition(node->right(), true_target(), false_target(), false);
    } else {
      // Nothing to do.
      ASSERT(!has_valid_frame() && !has_cc() && !is_false.is_linked());
    }
  }
}

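// Shape of the specialization below, illustratively:
//
//   x + 1   // right operand is a smi literal: SmiOperation
//   1 + x   // left operand is a smi literal: SmiOperation, reversed
//   x + y   // generic path; inline smi code only inside loops
//
// The overwrite_* flags record which operand is a disposable temporary
// that the generic stub may reuse for its result.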
void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ BinaryOperation");

  if (node->op() == Token::AND || node->op() == Token::OR) {
    GenerateLogicalBooleanOperation(node);
  } else {
    // Optimize for the case where (at least) one of the expressions
    // is a literal small integer.
    Literal* lliteral = node->left()->AsLiteral();
    Literal* rliteral = node->right()->AsLiteral();
    // NOTE: The code below assumes that the slow cases (calls to runtime)
    // never return a constant/immutable object.
    bool overwrite_left =
        (node->left()->AsBinaryOperation() != NULL &&
         node->left()->AsBinaryOperation()->ResultOverwriteAllowed());
    bool overwrite_right =
        (node->right()->AsBinaryOperation() != NULL &&
         node->right()->AsBinaryOperation()->ResultOverwriteAllowed());

    if (rliteral != NULL && rliteral->handle()->IsSmi()) {
      VirtualFrame::RegisterAllocationScope scope(this);
      Load(node->left());
      if (frame_->KnownSmiAt(0)) overwrite_left = false;
      SmiOperation(node->op(),
                   rliteral->handle(),
                   false,
                   overwrite_left ? OVERWRITE_LEFT : NO_OVERWRITE);
    } else if (lliteral != NULL && lliteral->handle()->IsSmi()) {
      VirtualFrame::RegisterAllocationScope scope(this);
      Load(node->right());
      if (frame_->KnownSmiAt(0)) overwrite_right = false;
      SmiOperation(node->op(),
                   lliteral->handle(),
                   true,
                   overwrite_right ? OVERWRITE_RIGHT : NO_OVERWRITE);
    } else {
      GenerateInlineSmi inline_smi =
          loop_nesting() > 0 ? GENERATE_INLINE_SMI : DONT_GENERATE_INLINE_SMI;
      if (lliteral != NULL) {
        ASSERT(!lliteral->handle()->IsSmi());
        inline_smi = DONT_GENERATE_INLINE_SMI;
      }
      if (rliteral != NULL) {
        ASSERT(!rliteral->handle()->IsSmi());
        inline_smi = DONT_GENERATE_INLINE_SMI;
      }
      VirtualFrame::RegisterAllocationScope scope(this);
      OverwriteMode overwrite_mode = NO_OVERWRITE;
      if (overwrite_left) {
        overwrite_mode = OVERWRITE_LEFT;
      } else if (overwrite_right) {
        overwrite_mode = OVERWRITE_RIGHT;
      }
      Load(node->left());
      Load(node->right());
      GenericBinaryOperation(node->op(), overwrite_mode, inline_smi);
    }
  }
  ASSERT(!has_valid_frame() ||
         (has_cc() && frame_->height() == original_height) ||
         (!has_cc() && frame_->height() == original_height + 1));
}


void CodeGenerator::VisitThisFunction(ThisFunction* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  frame_->EmitPush(MemOperand(frame_->Function()));
  ASSERT_EQ(original_height + 1, frame_->height());
}

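// Two comparison shapes are special-cased before the generic paths
// below: comparisons against the literal null, and the idiom
//
//   typeof x == 'number'   // likewise string, boolean, undefined,
//                          // function, object
//
// both of which compile to inline tag and map checks, so the typeof
// string is never actually materialized.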
void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ CompareOperation");

  VirtualFrame::RegisterAllocationScope nonspilled_scope(this);

  // Get the expressions from the node.
  Expression* left = node->left();
  Expression* right = node->right();
  Token::Value op = node->op();

  // To make null checks efficient, we check if either left or right is the
  // literal 'null'. If so, we optimize the code by inlining a null check
  // instead of calling the (very) general runtime routine for checking
  // equality.
  if (op == Token::EQ || op == Token::EQ_STRICT) {
    bool left_is_null =
        left->AsLiteral() != NULL && left->AsLiteral()->IsNull();
    bool right_is_null =
        right->AsLiteral() != NULL && right->AsLiteral()->IsNull();
    // The 'null' value can only be equal to 'null' or 'undefined'.
    if (left_is_null || right_is_null) {
      Load(left_is_null ? right : left);
      Register tos = frame_->PopToRegister();
      __ LoadRoot(ip, Heap::kNullValueRootIndex);
      __ cmp(tos, ip);

      // The 'null' value is only equal to 'undefined' if using non-strict
      // comparisons.
      if (op != Token::EQ_STRICT) {
        true_target()->Branch(eq);

        __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
        __ cmp(tos, Operand(ip));
        true_target()->Branch(eq);

        __ tst(tos, Operand(kSmiTagMask));
        false_target()->Branch(eq);

        // It can be an undetectable object.
        __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
        __ ldrb(tos, FieldMemOperand(tos, Map::kBitFieldOffset));
        __ and_(tos, tos, Operand(1 << Map::kIsUndetectable));
        __ cmp(tos, Operand(1 << Map::kIsUndetectable));
      }

      cc_reg_ = eq;
      ASSERT(has_cc() && frame_->height() == original_height);
      return;
    }
  }

  // To make typeof testing for natives implemented in JavaScript really
  // efficient, we generate special code for expressions of the form:
  // 'typeof <expression> == <string>'.
  UnaryOperation* operation = left->AsUnaryOperation();
  if ((op == Token::EQ || op == Token::EQ_STRICT) &&
      (operation != NULL && operation->op() == Token::TYPEOF) &&
      (right->AsLiteral() != NULL &&
       right->AsLiteral()->handle()->IsString())) {
    Handle<String> check(String::cast(*right->AsLiteral()->handle()));

    // Load the operand, move it to a register.
    LoadTypeofExpression(operation->expression());
    Register tos = frame_->PopToRegister();

    Register scratch = VirtualFrame::scratch0();

    if (check->Equals(Heap::number_symbol())) {
      __ tst(tos, Operand(kSmiTagMask));
      true_target()->Branch(eq);
      __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
      __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
      __ cmp(tos, ip);
      cc_reg_ = eq;

    } else if (check->Equals(Heap::string_symbol())) {
      __ tst(tos, Operand(kSmiTagMask));
      false_target()->Branch(eq);

      __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));

      // It can be an undetectable string object.
      __ ldrb(scratch, FieldMemOperand(tos, Map::kBitFieldOffset));
      __ and_(scratch, scratch, Operand(1 << Map::kIsUndetectable));
      __ cmp(scratch, Operand(1 << Map::kIsUndetectable));
      false_target()->Branch(eq);

      __ ldrb(scratch, FieldMemOperand(tos, Map::kInstanceTypeOffset));
      __ cmp(scratch, Operand(FIRST_NONSTRING_TYPE));
      cc_reg_ = lt;

    } else if (check->Equals(Heap::boolean_symbol())) {
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(tos, ip);
      true_target()->Branch(eq);
      __ LoadRoot(ip, Heap::kFalseValueRootIndex);
      __ cmp(tos, ip);
      cc_reg_ = eq;

    } else if (check->Equals(Heap::undefined_symbol())) {
      __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
      __ cmp(tos, ip);
      true_target()->Branch(eq);

      __ tst(tos, Operand(kSmiTagMask));
      false_target()->Branch(eq);

      // It can be an undetectable object.
      __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
      __ ldrb(scratch, FieldMemOperand(tos, Map::kBitFieldOffset));
      __ and_(scratch, scratch, Operand(1 << Map::kIsUndetectable));
      __ cmp(scratch, Operand(1 << Map::kIsUndetectable));

      cc_reg_ = eq;

    } else if (check->Equals(Heap::function_symbol())) {
      __ tst(tos, Operand(kSmiTagMask));
      false_target()->Branch(eq);
      Register map_reg = scratch;
      __ CompareObjectType(tos, map_reg, tos, JS_FUNCTION_TYPE);
      true_target()->Branch(eq);
      // Regular expressions are callable so typeof == 'function'.
      __ CompareInstanceType(map_reg, tos, JS_REGEXP_TYPE);
      cc_reg_ = eq;

    } else if (check->Equals(Heap::object_symbol())) {
      __ tst(tos, Operand(kSmiTagMask));
      false_target()->Branch(eq);

      __ LoadRoot(ip, Heap::kNullValueRootIndex);
      __ cmp(tos, ip);
      true_target()->Branch(eq);

      Register map_reg = scratch;
      __ CompareObjectType(tos, map_reg, tos, JS_REGEXP_TYPE);
      false_target()->Branch(eq);

      // It can be an undetectable object.
      __ ldrb(tos, FieldMemOperand(map_reg, Map::kBitFieldOffset));
      __ and_(tos, tos, Operand(1 << Map::kIsUndetectable));
      __ cmp(tos, Operand(1 << Map::kIsUndetectable));
      false_target()->Branch(eq);

      __ ldrb(tos, FieldMemOperand(map_reg, Map::kInstanceTypeOffset));
      __ cmp(tos, Operand(FIRST_JS_OBJECT_TYPE));
      false_target()->Branch(lt);
      __ cmp(tos, Operand(LAST_JS_OBJECT_TYPE));
      cc_reg_ = le;

    } else {
      // Uncommon case: typeof testing against a string literal that is
      // never returned from the typeof operator.
      false_target()->Jump();
    }
    ASSERT(!has_valid_frame() ||
           (has_cc() && frame_->height() == original_height));
    return;
  }

  switch (op) {
    case Token::EQ:
      Comparison(eq, left, right, false);
      break;

    case Token::LT:
      Comparison(lt, left, right);
      break;

    case Token::GT:
      Comparison(gt, left, right);
      break;

    case Token::LTE:
      Comparison(le, left, right);
      break;

    case Token::GTE:
      Comparison(ge, left, right);
      break;

    case Token::EQ_STRICT:
      Comparison(eq, left, right, true);
      break;

    case Token::IN: {
      Load(left);
      Load(right);
      frame_->InvokeBuiltin(Builtins::IN, CALL_JS, 2);
      frame_->EmitPush(r0);
      break;
    }

    case Token::INSTANCEOF: {
      Load(left);
      Load(right);
      InstanceofStub stub;
      frame_->CallStub(&stub, 2);
      // At this point if instanceof succeeded then r0 == 0.
      __ tst(r0, Operand(r0));
      cc_reg_ = eq;
      break;
    }

    default:
      UNREACHABLE();
  }
  ASSERT((has_cc() && frame_->height() == original_height) ||
         (!has_cc() && frame_->height() == original_height + 1));
}

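// The deferred classes that follow back the patchable inline property
// access sequences. The common scheme: the inline fast path is emitted
// with deliberately invalid placeholders (a null map, a zero offset)
// that the inline cache machinery later patches with the real map and
// offset; until that happens every execution falls through to this
// deferred code, which simply calls the relevant IC.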
class DeferredReferenceGetNamedValue: public DeferredCode {
 public:
  explicit DeferredReferenceGetNamedValue(Register receiver,
                                          Handle<String> name)
      : receiver_(receiver), name_(name) {
    set_comment("[ DeferredReferenceGetNamedValue");
  }

  virtual void Generate();

 private:
  Register receiver_;
  Handle<String> name_;
};


// Convention for this is that on entry the receiver is in a register that
// is not used by the stack. On exit the answer is found in that same
// register and the stack has the same height.
void DeferredReferenceGetNamedValue::Generate() {
#ifdef DEBUG
  int expected_height = frame_state()->frame()->height();
#endif
  VirtualFrame copied_frame(*frame_state()->frame());
  copied_frame.SpillAll();

  Register scratch1 = VirtualFrame::scratch0();
  Register scratch2 = VirtualFrame::scratch1();
  ASSERT(!receiver_.is(scratch1) && !receiver_.is(scratch2));
  __ DecrementCounter(&Counters::named_load_inline, 1, scratch1, scratch2);
  __ IncrementCounter(&Counters::named_load_inline_miss, 1, scratch1, scratch2);

  // Ensure receiver in r0 and name in r2 to match load ic calling convention.
  __ Move(r0, receiver_);
  __ mov(r2, Operand(name_));

  // The rest of the instructions in the deferred code must be together.
  { Assembler::BlockConstPoolScope block_const_pool(masm_);
    Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
    __ Call(ic, RelocInfo::CODE_TARGET);
    // The call must be followed by a nop(1) instruction to indicate that the
    // in-object load has been inlined.
    __ nop(PROPERTY_ACCESS_INLINED);

    // At this point the answer is in r0. We move it to the expected register
    // if necessary.
    __ Move(receiver_, r0);

    // Now go back to the frame that we entered with. This will not overwrite
    // the receiver register since that register was not in use when we came
    // in. The instructions emitted by this merge are skipped over by the
    // inline load patching mechanism when looking for the branch instruction
    // that tells it where the code to patch is.
    copied_frame.MergeTo(frame_state()->frame());

    // Block the constant pool for one more instruction after leaving this
    // constant pool block scope to include the branch instruction ending the
    // deferred code.
    __ BlockConstPoolFor(1);
  }
  ASSERT_EQ(expected_height, frame_state()->frame()->height());
}


class DeferredReferenceGetKeyedValue: public DeferredCode {
 public:
  DeferredReferenceGetKeyedValue(Register key, Register receiver)
      : key_(key), receiver_(receiver) {
    set_comment("[ DeferredReferenceGetKeyedValue");
  }

  virtual void Generate();

 private:
  Register key_;
  Register receiver_;
};

// Takes key and receiver in r0 and r1 or vice versa. Returns result
// in r0.
void DeferredReferenceGetKeyedValue::Generate() {
  ASSERT((key_.is(r0) && receiver_.is(r1)) ||
         (key_.is(r1) && receiver_.is(r0)));

  VirtualFrame copied_frame(*frame_state()->frame());
  copied_frame.SpillAll();

  Register scratch1 = VirtualFrame::scratch0();
  Register scratch2 = VirtualFrame::scratch1();
  __ DecrementCounter(&Counters::keyed_load_inline, 1, scratch1, scratch2);
  __ IncrementCounter(&Counters::keyed_load_inline_miss, 1, scratch1, scratch2);

  // Ensure key in r0 and receiver in r1 to match keyed load ic calling
  // convention.
  if (key_.is(r1)) {
    __ Swap(r0, r1, ip);
  }

  // The rest of the instructions in the deferred code must be together.
  { Assembler::BlockConstPoolScope block_const_pool(masm_);
    // Call keyed load IC. It has the arguments key and receiver in r0 and r1.
    Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
    __ Call(ic, RelocInfo::CODE_TARGET);
    // The call must be followed by a nop instruction to indicate that the
    // keyed load has been inlined.
    __ nop(PROPERTY_ACCESS_INLINED);

    // Now go back to the frame that we entered with. This will not overwrite
    // the receiver or key registers since they were not in use when we came
    // in. The instructions emitted by this merge are skipped over by the
    // inline load patching mechanism when looking for the branch instruction
    // that tells it where the code to patch is.
    copied_frame.MergeTo(frame_state()->frame());

    // Block the constant pool for one more instruction after leaving this
    // constant pool block scope to include the branch instruction ending the
    // deferred code.
    __ BlockConstPoolFor(1);
  }
}


class DeferredReferenceSetKeyedValue: public DeferredCode {
 public:
  DeferredReferenceSetKeyedValue(Register value,
                                 Register key,
                                 Register receiver)
      : value_(value), key_(key), receiver_(receiver) {
    set_comment("[ DeferredReferenceSetKeyedValue");
  }

  virtual void Generate();

 private:
  Register value_;
  Register key_;
  Register receiver_;
};


void DeferredReferenceSetKeyedValue::Generate() {
  Register scratch1 = VirtualFrame::scratch0();
  Register scratch2 = VirtualFrame::scratch1();
  __ DecrementCounter(&Counters::keyed_store_inline, 1, scratch1, scratch2);
  __ IncrementCounter(
      &Counters::keyed_store_inline_miss, 1, scratch1, scratch2);

  // Ensure value in r0, key in r1 and receiver in r2 to match keyed store ic
  // calling convention.
  if (value_.is(r1)) {
    __ Swap(r0, r1, ip);
  }
  ASSERT(receiver_.is(r2));

  // The rest of the instructions in the deferred code must be together.
  { Assembler::BlockConstPoolScope block_const_pool(masm_);
    // Call keyed store IC. It has the arguments value, key and receiver in r0,
    // r1 and r2.
    Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
    __ Call(ic, RelocInfo::CODE_TARGET);
    // The call must be followed by a nop instruction to indicate that the
    // keyed store has been inlined.
    __ nop(PROPERTY_ACCESS_INLINED);

    // Block the constant pool for one more instruction after leaving this
    // constant pool block scope to include the branch instruction ending the
    // deferred code.
    __ BlockConstPoolFor(1);
  }
}

// Consumes the top of stack (the receiver) and pushes the result instead.
void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
  if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) {
    Comment cmnt(masm(), "[ Load from named Property");
    // Set up the name register and call the load IC.
    frame_->CallLoadIC(name,
                       is_contextual
                           ? RelocInfo::CODE_TARGET_CONTEXT
                           : RelocInfo::CODE_TARGET);
    frame_->EmitPush(r0);  // Push answer.
  } else {
    // Inline the in-object property case.
    Comment cmnt(masm(), "[ Inlined named property load");

    // Counter will be decremented in the deferred code. Placed here to avoid
    // having it in the instruction stream below where patching will occur.
    __ IncrementCounter(&Counters::named_load_inline, 1,
                        frame_->scratch0(), frame_->scratch1());

    // The following instructions are the inlined load of an in-object
    // property. Parts of this code are patched, so the exact instructions
    // generated need to be fixed. Therefore the constant pool is blocked
    // while generating this code.

    // Load the receiver from the stack.
    Register receiver = frame_->PopToRegister();

    DeferredReferenceGetNamedValue* deferred =
        new DeferredReferenceGetNamedValue(receiver, name);

#ifdef DEBUG
    int kInlinedNamedLoadInstructions = 7;
    Label check_inlined_codesize;
    masm_->bind(&check_inlined_codesize);
#endif

    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      // Check that the receiver is a heap object.
      __ tst(receiver, Operand(kSmiTagMask));
      deferred->Branch(eq);

      Register scratch = VirtualFrame::scratch0();
      Register scratch2 = VirtualFrame::scratch1();

      // Check the map. The null map used below is patched by the inline cache
      // code. Therefore we can't use a LoadRoot call.
      __ ldr(scratch, FieldMemOperand(receiver, HeapObject::kMapOffset));
      __ mov(scratch2, Operand(Factory::null_value()));
      __ cmp(scratch, scratch2);
      deferred->Branch(ne);

      // Initially use an invalid index. The index will be patched by the
      // inline cache code.
      __ ldr(receiver, MemOperand(receiver, 0));

      // Make sure that the expected number of instructions are generated.
      ASSERT_EQ(kInlinedNamedLoadInstructions,
                masm_->InstructionsGeneratedSince(&check_inlined_codesize));
    }

    deferred->BindExit();
    // At this point the receiver register has the result, either from the
    // deferred code or from the inlined code.
    frame_->EmitPush(receiver);
  }
}


void CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
#ifdef DEBUG
  int expected_height = frame_->height() - (is_contextual ? 1 : 2);
#endif
  frame_->CallStoreIC(name, is_contextual);

  ASSERT_EQ(expected_height, frame_->height());
}

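// Sketch of the inline keyed load fast path emitted below for code in
// loops: check the receiver is a heap object whose (patched) map matches,
// check the key is a smi, bounds-check the key against the FixedArray
// length, load the element, and bail out to the deferred IC call when the
// loaded value is the hole, which is how absent elements are encoded.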
void CodeGenerator::EmitKeyedLoad() {
  if (loop_nesting() == 0) {
    Comment cmnt(masm_, "[ Load from keyed property");
    frame_->CallKeyedLoadIC();
  } else {
    // Inline the keyed load.
    Comment cmnt(masm_, "[ Inlined load from keyed property");

    // Counter will be decremented in the deferred code. Placed here to avoid
    // having it in the instruction stream below where patching will occur.
    __ IncrementCounter(&Counters::keyed_load_inline, 1,
                        frame_->scratch0(), frame_->scratch1());

    // Load the key and receiver from the stack.
    bool key_is_known_smi = frame_->KnownSmiAt(0);
    Register key = frame_->PopToRegister();
    Register receiver = frame_->PopToRegister(key);

    // The deferred code expects key and receiver in registers.
    DeferredReferenceGetKeyedValue* deferred =
        new DeferredReferenceGetKeyedValue(key, receiver);

    // Check that the receiver is a heap object.
    __ tst(receiver, Operand(kSmiTagMask));
    deferred->Branch(eq);

    // The following instructions are the part of the inlined load keyed
    // property code which can be patched. Therefore the exact number of
    // instructions generated needs to be fixed, so the constant pool is
    // blocked while generating this code.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      Register scratch1 = VirtualFrame::scratch0();
      Register scratch2 = VirtualFrame::scratch1();
      // Check the map. The null map used below is patched by the inline cache
      // code.
      __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));

      // Check that the key is a smi.
      if (!key_is_known_smi) {
        __ tst(key, Operand(kSmiTagMask));
        deferred->Branch(ne);
      }

#ifdef DEBUG
      Label check_inlined_codesize;
      masm_->bind(&check_inlined_codesize);
#endif
      __ mov(scratch2, Operand(Factory::null_value()));
      __ cmp(scratch1, scratch2);
      deferred->Branch(ne);

      // Get the elements array from the receiver and check that it
      // is not a dictionary.
      __ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset));
      if (FLAG_debug_code) {
        __ ldr(scratch2, FieldMemOperand(scratch1, JSObject::kMapOffset));
        __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
        __ cmp(scratch2, ip);
        __ Assert(eq, "JSObject with fast elements map has slow elements");
      }

      // Check that key is within bounds. Use unsigned comparison to handle
      // negative keys.
      __ ldr(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset));
      __ cmp(scratch2, key);
      deferred->Branch(ls);  // Unsigned less equal.

      // Load and check that the result is not the hole (key is a smi).
      __ LoadRoot(scratch2, Heap::kTheHoleValueRootIndex);
      __ add(scratch1,
             scratch1,
             Operand(FixedArray::kHeaderSize - kHeapObjectTag));
      __ ldr(scratch1,
             MemOperand(scratch1, key, LSL,
                        kPointerSizeLog2 - (kSmiTagSize + kSmiShiftSize)));
      __ cmp(scratch1, scratch2);
      deferred->Branch(eq);

      __ mov(r0, scratch1);
      // Make sure that the expected number of instructions are generated.
      ASSERT_EQ(GetInlinedKeyedLoadInstructionsAfterPatch(),
                masm_->InstructionsGeneratedSince(&check_inlined_codesize));
    }

    deferred->BindExit();
  }
}

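// Write barrier reasoning for the inline store below: a smi store never
// needs a barrier, so a value statically known to be a smi (or a caller
// passing NEVER_NEWSPACE) skips the check. Otherwise either the value is
// dynamically checked to be a smi (LIKELY_SMI), or the elements array is
// checked to be in new space, in which case storing any value is safe
// without a barrier. The we_remembered_the_write_barrier debug flag
// asserts that one of these cases actually covered the store.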
void CodeGenerator::EmitKeyedStore(StaticType* key_type,
                                   WriteBarrierCharacter wb_info) {
  // Generate inlined version of the keyed store if the code is in a loop
  // and the key is likely to be a smi.
  if (loop_nesting() > 0 && key_type->IsLikelySmi()) {
    // Inline the keyed store.
    Comment cmnt(masm_, "[ Inlined store to keyed property");

    Register scratch1 = VirtualFrame::scratch0();
    Register scratch2 = VirtualFrame::scratch1();
    Register scratch3 = r3;

    // Counter will be decremented in the deferred code. Placed here to avoid
    // having it in the instruction stream below where patching will occur.
    __ IncrementCounter(&Counters::keyed_store_inline, 1,
                        scratch1, scratch2);

    // Load the value, key and receiver from the stack.
    bool value_is_harmless = frame_->KnownSmiAt(0);
    if (wb_info == NEVER_NEWSPACE) value_is_harmless = true;
    bool key_is_smi = frame_->KnownSmiAt(1);
    Register value = frame_->PopToRegister();
    Register key = frame_->PopToRegister(value);
    VirtualFrame::SpilledScope spilled(frame_);
    Register receiver = r2;
    frame_->EmitPop(receiver);

#ifdef DEBUG
    bool we_remembered_the_write_barrier = value_is_harmless;
#endif

    // The deferred code expects value, key and receiver in registers.
    DeferredReferenceSetKeyedValue* deferred =
        new DeferredReferenceSetKeyedValue(value, key, receiver);

    // Check that the value is a smi. As this inlined code does not set the
    // write barrier it is only possible to store smi values.
    if (!value_is_harmless) {
      // If the value is not likely to be a Smi then let's test the fixed array
      // for new space instead. See below.
      if (wb_info == LIKELY_SMI) {
        __ tst(value, Operand(kSmiTagMask));
        deferred->Branch(ne);
#ifdef DEBUG
        we_remembered_the_write_barrier = true;
#endif
      }
    }

    if (!key_is_smi) {
      // Check that the key is a smi.
      __ tst(key, Operand(kSmiTagMask));
      deferred->Branch(ne);
    }

    // Check that the receiver is a heap object.
    __ tst(receiver, Operand(kSmiTagMask));
    deferred->Branch(eq);

    // Check that the receiver is a JSArray.
    __ CompareObjectType(receiver, scratch1, scratch1, JS_ARRAY_TYPE);
    deferred->Branch(ne);

    // Check that the key is within bounds. Both the key and the length of
    // the JSArray are smis. Use unsigned comparison to handle negative keys.
    __ ldr(scratch1, FieldMemOperand(receiver, JSArray::kLengthOffset));
    __ cmp(scratch1, key);
    deferred->Branch(ls);  // Unsigned less equal.

    // Get the elements array from the receiver.
    __ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset));
    if (!value_is_harmless && wb_info != LIKELY_SMI) {
      Label ok;
      __ and_(scratch2, scratch1, Operand(ExternalReference::new_space_mask()));
      __ cmp(scratch2, Operand(ExternalReference::new_space_start()));
      __ tst(value, Operand(kSmiTagMask), ne);
      deferred->Branch(ne);
#ifdef DEBUG
      we_remembered_the_write_barrier = true;
#endif
    }
    // Check that the elements array is not a dictionary.
    __ ldr(scratch2, FieldMemOperand(scratch1, JSObject::kMapOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01006462 // The following instructions are the part of the inlined store keyed
6463 // property code which can be patched. Therefore the exact number of
6464 // instructions generated need to be fixed, so the constant pool is blocked
6465 // while generating this code.
Steve Block6ded16b2010-05-10 14:33:55 +01006466 { Assembler::BlockConstPoolScope block_const_pool(masm_);
Steve Block8defd9f2010-07-08 12:39:36 +01006467#ifdef DEBUG
6468 Label check_inlined_codesize;
6469 masm_->bind(&check_inlined_codesize);
6470#endif
6471
Steve Block6ded16b2010-05-10 14:33:55 +01006472 // Read the fixed array map from the constant pool (not from the root
6473 // array) so that the value can be patched. When debugging, we patch this
6474 // comparison to always fail so that we will hit the IC call in the
6475 // deferred code which will allow the debugger to break for fast case
6476 // stores.
Leon Clarkef7060e22010-06-03 12:02:55 +01006477 __ mov(scratch3, Operand(Factory::fixed_array_map()));
6478 __ cmp(scratch2, scratch3);
Steve Block6ded16b2010-05-10 14:33:55 +01006479 deferred->Branch(ne);
6480
6481 // Store the value.
Leon Clarkef7060e22010-06-03 12:02:55 +01006482 __ add(scratch1, scratch1,
6483 Operand(FixedArray::kHeaderSize - kHeapObjectTag));
6484 __ str(value,
6485 MemOperand(scratch1, key, LSL,
6486 kPointerSizeLog2 - (kSmiTagSize + kSmiShiftSize)));
Steve Block6ded16b2010-05-10 14:33:55 +01006487
6488 // Make sure that the expected number of instructions are generated.
Leon Clarkef7060e22010-06-03 12:02:55 +01006489 ASSERT_EQ(kInlinedKeyedStoreInstructionsAfterPatch,
Steve Block6ded16b2010-05-10 14:33:55 +01006490 masm_->InstructionsGeneratedSince(&check_inlined_codesize));
6491 }
6492
Steve Block8defd9f2010-07-08 12:39:36 +01006493 ASSERT(we_remembered_the_write_barrier);
6494
Steve Block6ded16b2010-05-10 14:33:55 +01006495 deferred->BindExit();
6496 } else {
6497 frame()->CallKeyedStoreIC();
6498 }
Leon Clarked91b9f72010-01-27 17:25:45 +00006499}
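
// Rough shape of the patchable sequence emitted above, assuming 32-bit
// pointers and one-bit smi tagging (a sketch only; the assembler chooses the
// actual encodings, and the IC patching code relies solely on the map load
// sitting at a fixed offset from check_inlined_codesize):
//
//   ldr   scratch3, [pc, #<constant pool slot>]  @ Factory::fixed_array_map()
//   cmp   scratch2, scratch3
//   b     ne, <deferred>
//   add   scratch1, scratch1, #(FixedArray::kHeaderSize - kHeapObjectTag)
//   str   value, [scratch1, key, LSL #1]         @ smi key, so the shift is 1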


#ifdef DEBUG
bool CodeGenerator::HasValidEntryRegisters() { return true; }
#endif


#undef __
#define __ ACCESS_MASM(masm)

Handle<String> Reference::GetName() {
  ASSERT(type_ == NAMED);
  Property* property = expression_->AsProperty();
  if (property == NULL) {
    // A global variable reference is treated as a named property reference.
    VariableProxy* proxy = expression_->AsVariableProxy();
    ASSERT(proxy->AsVariable() != NULL);
    ASSERT(proxy->AsVariable()->is_global());
    return proxy->name();
  } else {
    Literal* raw_name = property->key()->AsLiteral();
    ASSERT(raw_name != NULL);
    return Handle<String>(String::cast(*raw_name->handle()));
  }
}


void Reference::DupIfPersist() {
  if (persist_after_get_) {
    switch (type_) {
      case KEYED:
        cgen_->frame()->Dup2();
        break;
      case NAMED:
        cgen_->frame()->Dup();
        // Fall through.
      case UNLOADED:
      case ILLEGAL:
      case SLOT:
        // Do nothing.
        ;
    }
  } else {
    set_unloaded();
  }
}


void Reference::GetValue() {
  ASSERT(cgen_->HasValidEntryRegisters());
  ASSERT(!is_illegal());
  ASSERT(!cgen_->has_cc());
  MacroAssembler* masm = cgen_->masm();
  Property* property = expression_->AsProperty();
  if (property != NULL) {
    cgen_->CodeForSourcePosition(property->position());
  }

  switch (type_) {
    case SLOT: {
      Comment cmnt(masm, "[ Load from Slot");
      Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
      ASSERT(slot != NULL);
      DupIfPersist();
      cgen_->LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
      break;
    }

    case NAMED: {
      Variable* var = expression_->AsVariableProxy()->AsVariable();
      bool is_global = var != NULL;
      ASSERT(!is_global || var->is_global());
      Handle<String> name = GetName();
      DupIfPersist();
      cgen_->EmitNamedLoad(name, is_global);
      break;
    }

    case KEYED: {
      ASSERT(property != NULL);
      DupIfPersist();
      cgen_->EmitKeyedLoad();
      cgen_->frame()->EmitPush(r0);
      break;
    }

    default:
      UNREACHABLE();
  }
}


void Reference::SetValue(InitState init_state, WriteBarrierCharacter wb_info) {
  ASSERT(!is_illegal());
  ASSERT(!cgen_->has_cc());
  MacroAssembler* masm = cgen_->masm();
  VirtualFrame* frame = cgen_->frame();
  Property* property = expression_->AsProperty();
  if (property != NULL) {
    cgen_->CodeForSourcePosition(property->position());
  }

  switch (type_) {
    case SLOT: {
      Comment cmnt(masm, "[ Store to Slot");
      Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
      cgen_->StoreToSlot(slot, init_state);
      set_unloaded();
      break;
    }

    case NAMED: {
      Comment cmnt(masm, "[ Store to named Property");
      cgen_->EmitNamedStore(GetName(), false);
      frame->EmitPush(r0);
      set_unloaded();
      break;
    }

    case KEYED: {
      Comment cmnt(masm, "[ Store to keyed Property");
      Property* property = expression_->AsProperty();
      ASSERT(property != NULL);
      cgen_->CodeForSourcePosition(property->position());
      cgen_->EmitKeyedStore(property->key()->type(), wb_info);
      frame->EmitPush(r0);
      set_unloaded();
      break;
    }

    default:
      UNREACHABLE();
  }
}


void FastNewClosureStub::Generate(MacroAssembler* masm) {
  // Create a new closure from the given function info in new
  // space. Set the context to the current context in cp.
  Label gc;

  // Pop the function info from the stack.
  __ pop(r3);

  // Attempt to allocate a new JSFunction in new space.
  __ AllocateInNewSpace(JSFunction::kSize,
                        r0,
                        r1,
                        r2,
                        &gc,
                        TAG_OBJECT);

  // Compute the function map in the current global context and set that
  // as the map of the allocated object.
  __ ldr(r2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset));
  __ ldr(r2, MemOperand(r2, Context::SlotOffset(Context::FUNCTION_MAP_INDEX)));
  __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset));

  // Initialize the rest of the function. We don't have to update the
  // write barrier because the allocated object is in new space.
  __ LoadRoot(r1, Heap::kEmptyFixedArrayRootIndex);
  __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
  __ str(r1, FieldMemOperand(r0, JSObject::kPropertiesOffset));
  __ str(r1, FieldMemOperand(r0, JSObject::kElementsOffset));
  __ str(r2, FieldMemOperand(r0, JSFunction::kPrototypeOrInitialMapOffset));
  __ str(r3, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
  __ str(cp, FieldMemOperand(r0, JSFunction::kContextOffset));
  __ str(r1, FieldMemOperand(r0, JSFunction::kLiteralsOffset));

  // Return the result. The argument function info has been popped already.
  __ Ret();

  // Create a new closure through the slower runtime call.
  __ bind(&gc);
  __ Push(cp, r3);
  __ TailCallRuntime(Runtime::kNewClosure, 2, 1);
}


void FastNewContextStub::Generate(MacroAssembler* masm) {
  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;

  // Attempt to allocate the context in new space.
  __ AllocateInNewSpace(FixedArray::SizeFor(length),
                        r0,
                        r1,
                        r2,
                        &gc,
                        TAG_OBJECT);

  // Load the function from the stack.
  __ ldr(r3, MemOperand(sp, 0));

  // Set up the object header.
  __ LoadRoot(r2, Heap::kContextMapRootIndex);
  __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ mov(r2, Operand(Smi::FromInt(length)));
  __ str(r2, FieldMemOperand(r0, FixedArray::kLengthOffset));

  // Set up the fixed slots.
  __ mov(r1, Operand(Smi::FromInt(0)));
  __ str(r3, MemOperand(r0, Context::SlotOffset(Context::CLOSURE_INDEX)));
  __ str(r0, MemOperand(r0, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  __ str(r1, MemOperand(r0, Context::SlotOffset(Context::PREVIOUS_INDEX)));
  __ str(r1, MemOperand(r0, Context::SlotOffset(Context::EXTENSION_INDEX)));

  // Copy the global object from the surrounding context.
  __ ldr(r1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ str(r1, MemOperand(r0, Context::SlotOffset(Context::GLOBAL_INDEX)));

  // Initialize the rest of the slots to undefined.
  __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
    __ str(r1, MemOperand(r0, Context::SlotOffset(i)));
  }

  // Remove the on-stack argument and return.
  __ mov(cp, r0);
  __ pop();
  __ Ret();

  // Need to collect. Call into the runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kNewContext, 1, 1);
}
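
// For illustration, assuming the slot indices and MIN_CONTEXT_SLOTS value
// currently defined in context.h: a stub instantiated with slots_ == 2
// allocates FixedArray::SizeFor(Context::MIN_CONTEXT_SLOTS + 2) bytes, laid
// out as
//   [map | length | closure | fcontext | previous | extension | global |
//    undefined | undefined]
// where the two trailing undefined entries are the user-visible slots.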


void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [sp]: constant elements.
  // [sp + kPointerSize]: literal index.
  // [sp + (2 * kPointerSize)]: literals array.

  // All sizes here are multiples of kPointerSize.
  int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
  int size = JSArray::kSize + elements_size;

  // Load the boilerplate object into r3 and check if we need to create a
  // boilerplate.
  Label slow_case;
  __ ldr(r3, MemOperand(sp, 2 * kPointerSize));
  __ ldr(r0, MemOperand(sp, 1 * kPointerSize));
  __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r3, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r3, ip);
  __ b(eq, &slow_case);

  // Allocate both the JS array and the elements array in one big
  // allocation. This avoids multiple limit checks.
  __ AllocateInNewSpace(size,
                        r0,
                        r1,
                        r2,
                        &slow_case,
                        TAG_OBJECT);

  // Copy the JS array part.
  for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
    if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
      __ ldr(r1, FieldMemOperand(r3, i));
      __ str(r1, FieldMemOperand(r0, i));
    }
  }

  if (length_ > 0) {
    // Get hold of the elements array of the boilerplate and set up the
    // elements pointer in the resulting object.
    __ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset));
    __ add(r2, r0, Operand(JSArray::kSize));
    __ str(r2, FieldMemOperand(r0, JSArray::kElementsOffset));

    // Copy the elements array.
    for (int i = 0; i < elements_size; i += kPointerSize) {
      __ ldr(r1, FieldMemOperand(r3, i));
      __ str(r1, FieldMemOperand(r2, i));
    }
  }

  // Return and remove the on-stack parameters.
  __ add(sp, sp, Operand(3 * kPointerSize));
  __ Ret();

  __ bind(&slow_case);
  __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
}
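
// Concrete sizes, assuming a 32-bit target (kPointerSize == 4): for a
// three-element literal, elements_size is FixedArray::SizeFor(3), i.e. the
// two-word header (map and length) plus 3 * 4 = 20 bytes, so the single
// allocation covers JSArray::kSize + 20 bytes. Note that length_ is fixed
// when the stub is instantiated, so both copy loops above are fully
// unrolled in the generated code.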


// Takes a Smi and converts it to an IEEE 64 bit floating point value in two
// registers. The format is 1 sign bit, 11 exponent bits (biased 1023) and
// 52 fraction bits (20 in the first word, 32 in the second). Zeros is a
// scratch register. Destroys the source register. No GC occurs during this
// stub so you don't have to set up the frame.
class ConvertToDoubleStub : public CodeStub {
 public:
  ConvertToDoubleStub(Register result_reg_1,
                      Register result_reg_2,
                      Register source_reg,
                      Register scratch_reg)
      : result1_(result_reg_1),
        result2_(result_reg_2),
        source_(source_reg),
        zeros_(scratch_reg) { }

 private:
  Register result1_;
  Register result2_;
  Register source_;
  Register zeros_;

  // Minor key encoding in 16 bits.
  class ModeBits: public BitField<OverwriteMode, 0, 2> {};
  class OpBits: public BitField<Token::Value, 2, 14> {};

  Major MajorKey() { return ConvertToDouble; }
  int MinorKey() {
    // Encode the parameters in a unique 16 bit value.
    return result1_.code() +
           (result2_.code() << 4) +
           (source_.code() << 8) +
           (zeros_.code() << 12);
  }

  void Generate(MacroAssembler* masm);

  const char* GetName() { return "ConvertToDoubleStub"; }

#ifdef DEBUG
  void Print() { PrintF("ConvertToDoubleStub\n"); }
#endif
};


void ConvertToDoubleStub::Generate(MacroAssembler* masm) {
#ifndef BIG_ENDIAN_FLOATING_POINT
  Register exponent = result1_;
  Register mantissa = result2_;
#else
  Register exponent = result2_;
  Register mantissa = result1_;
#endif
  Label not_special;
  // Convert from Smi to integer.
  __ mov(source_, Operand(source_, ASR, kSmiTagSize));
  // Move the sign bit from the source to the destination. This works because
  // the sign bit in the exponent word of the double has the same position and
  // polarity as the 2's complement sign bit in a Smi.
  ASSERT(HeapNumber::kSignMask == 0x80000000u);
  __ and_(exponent, source_, Operand(HeapNumber::kSignMask), SetCC);
  // Subtract from 0 if the source was negative.
  __ rsb(source_, source_, Operand(0), LeaveCC, ne);

  // We have -1, 0 or 1, which we treat specially. Register source_ contains
  // the absolute value: it is either equal to 1 (special case of -1 and 1),
  // greater than 1 (not a special case) or less than 1 (special case of 0).
  __ cmp(source_, Operand(1));
  __ b(gt, &not_special);

  // For 1 or -1 we need to or in the 0 exponent (biased to 1023).
  static const uint32_t exponent_word_for_1 =
      HeapNumber::kExponentBias << HeapNumber::kExponentShift;
  __ orr(exponent, exponent, Operand(exponent_word_for_1), LeaveCC, eq);
  // 1, 0 and -1 all have 0 for the second word.
  __ mov(mantissa, Operand(0));
  __ Ret();

  __ bind(&not_special);
  // Count leading zeros. Uses mantissa for a scratch register on pre-ARM5.
  // Gets the wrong answer for 0, but we already checked for that case above.
  __ CountLeadingZeros(zeros_, source_, mantissa);
  // Compute the exponent and or it into the exponent register.
  // We use mantissa as a scratch register here. Use a fudge factor to
  // divide the constant 31 + HeapNumber::kExponentBias, 0x41e, into two parts
  // that fit in the ARM's constant field.
  int fudge = 0x400;
  __ rsb(mantissa, zeros_, Operand(31 + HeapNumber::kExponentBias - fudge));
  __ add(mantissa, mantissa, Operand(fudge));
  __ orr(exponent,
         exponent,
         Operand(mantissa, LSL, HeapNumber::kExponentShift));
  // Shift up the source, chopping the top bit off.
  __ add(zeros_, zeros_, Operand(1));
  // This wouldn't work for 1.0 or -1.0 as the shift would be 32, which means 0.
  __ mov(source_, Operand(source_, LSL, zeros_));
  // Compute the lower part of the fraction (the last 12 bits).
  __ mov(mantissa, Operand(source_, LSL, HeapNumber::kMantissaBitsInTopWord));
  // And the top (top 20 bits).
  __ orr(exponent,
         exponent,
         Operand(source_, LSR, 32 - HeapNumber::kMantissaBitsInTopWord));
  __ Ret();
}
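
// Worked example for the code above: converting the Smi 5 (tagged value 10
// with one-bit smi tagging). After untagging, source_ = 5 and
// CountLeadingZeros gives zeros_ = 29, so the biased exponent is
// 31 + 1023 - 29 = 1025 = 0x401. Shifting source_ left by 30 chops off the
// implicit leading 1 and leaves 0x40000000; its top 20 bits contribute
// 0x40000 to the exponent word, giving 0x40140000, and the mantissa word is
// 0x00000000. That is exactly the IEEE 754 encoding of 5.0,
// 0x4014000000000000.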


// See comment for class.
void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) {
  Label max_negative_int;
  // the_int_ has the answer, which is a signed int32 but not a Smi.
  // We test for the special value that has a different exponent. This test
  // has the neat side effect of setting the flags according to the sign.
  ASSERT(HeapNumber::kSignMask == 0x80000000u);
  __ cmp(the_int_, Operand(0x80000000u));
  __ b(eq, &max_negative_int);
  // Set up the correct exponent in scratch_. All non-Smi int32s have the
  // same. A non-Smi integer is 1.xxx * 2^30 so the exponent is 30 (biased).
  uint32_t non_smi_exponent =
      (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift;
  __ mov(scratch_, Operand(non_smi_exponent));
  // Set the sign bit in scratch_ if the value was negative.
  __ orr(scratch_, scratch_, Operand(HeapNumber::kSignMask), LeaveCC, cs);
  // Subtract from 0 if the value was negative.
  __ rsb(the_int_, the_int_, Operand(0), LeaveCC, cs);
  // We should be masking the implicit first digit of the mantissa away here,
  // but it just ends up combining harmlessly with the last digit of the
  // exponent that happens to be 1. The sign bit is 0 so we shift 10 to get
  // the most significant 1 to hit the last bit of the 12 bit sign and
  // exponent.
  ASSERT(((1 << HeapNumber::kExponentShift) & non_smi_exponent) != 0);
  const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2;
  __ orr(scratch_, scratch_, Operand(the_int_, LSR, shift_distance));
  __ str(scratch_, FieldMemOperand(the_heap_number_,
                                   HeapNumber::kExponentOffset));
  __ mov(scratch_, Operand(the_int_, LSL, 32 - shift_distance));
  __ str(scratch_, FieldMemOperand(the_heap_number_,
                                   HeapNumber::kMantissaOffset));
  __ Ret();

  __ bind(&max_negative_int);
  // The max negative int32 is stored as a positive number in the mantissa of
  // a double because it uses a sign bit instead of using two's complement.
  // The actual mantissa bits stored are all 0 because the implicit most
  // significant 1 bit is not stored.
  non_smi_exponent += 1 << HeapNumber::kExponentShift;
  __ mov(ip, Operand(HeapNumber::kSignMask | non_smi_exponent));
  __ str(ip, FieldMemOperand(the_heap_number_, HeapNumber::kExponentOffset));
  __ mov(ip, Operand(0));
  __ str(ip, FieldMemOperand(the_heap_number_, HeapNumber::kMantissaOffset));
  __ Ret();
}
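
// For reference, with kExponentBias == 1023 and kExponentShift == 20 the
// exponent word built above is (1023 + 30) << 20 == 0x41d00000 in the
// generic case. In the max_negative_int case the exponent is bumped to
// 1023 + 31 and the sign bit is set, giving the high word 0xc1e00000;
// together with the all-zero mantissa word this is exactly the encoding of
// -2147483648.0 (kMinInt as a double).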


// Handle the case where the lhs and rhs are the same object.
// Equality is almost reflexive (everything but NaN), so this is a test
// for "identity and not NaN".
static void EmitIdenticalObjectComparison(MacroAssembler* masm,
                                          Label* slow,
                                          Condition cc,
                                          bool never_nan_nan) {
  Label not_identical;
  Label heap_number, return_equal;
  __ cmp(r0, r1);
  __ b(ne, &not_identical);

  // The two objects are identical. If we know that one of them isn't NaN then
  // we now know they test equal.
  if (cc != eq || !never_nan_nan) {
    // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
    // so we do the second best thing: test it ourselves.
    // They are both equal and they are not both Smis so both of them are not
    // Smis. If it's not a heap number, then return equal.
    if (cc == lt || cc == gt) {
      __ CompareObjectType(r0, r4, r4, FIRST_JS_OBJECT_TYPE);
      __ b(ge, slow);
    } else {
      __ CompareObjectType(r0, r4, r4, HEAP_NUMBER_TYPE);
      __ b(eq, &heap_number);
      // Comparing JS objects with <=, >= is complicated.
      if (cc != eq) {
        __ cmp(r4, Operand(FIRST_JS_OBJECT_TYPE));
        __ b(ge, slow);
        // Normally here we fall through to return_equal, but undefined is
        // special: (undefined == undefined) == true, but
        // (undefined <= undefined) == false! See ECMAScript 11.8.5.
        if (cc == le || cc == ge) {
          __ cmp(r4, Operand(ODDBALL_TYPE));
          __ b(ne, &return_equal);
          __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
          __ cmp(r0, r2);
          __ b(ne, &return_equal);
          if (cc == le) {
            // undefined <= undefined should fail.
            __ mov(r0, Operand(GREATER));
          } else {
            // undefined >= undefined should fail.
            __ mov(r0, Operand(LESS));
          }
          __ Ret();
        }
      }
    }
  }

  __ bind(&return_equal);
  if (cc == lt) {
    __ mov(r0, Operand(GREATER));  // Things aren't less than themselves.
  } else if (cc == gt) {
    __ mov(r0, Operand(LESS));     // Things aren't greater than themselves.
  } else {
    __ mov(r0, Operand(EQUAL));    // Things are <=, >=, ==, === themselves.
  }
  __ Ret();

  if (cc != eq || !never_nan_nan) {
    // For less and greater we don't have to check for NaN since the result of
    // x < x is false regardless. For the others here is some code to check
    // for NaN.
    if (cc != lt && cc != gt) {
      __ bind(&heap_number);
      // It is a heap number, so return non-equal if it's NaN and equal if
      // it's not NaN.

      // The representation of NaN values has all exponent bits (52..62) set,
      // and not all mantissa bits (0..51) clear.
      // Read the top bits of the double representation (second word of the
      // value).
      __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
      // Test that the exponent bits are all set.
      __ Sbfx(r3, r2, HeapNumber::kExponentShift, HeapNumber::kExponentBits);
      // NaNs have all-one exponents so they sign extend to -1.
      __ cmp(r3, Operand(-1));
      __ b(ne, &return_equal);

      // Shift out the flag and all exponent bits, retaining only the mantissa.
      __ mov(r2, Operand(r2, LSL, HeapNumber::kNonMantissaBitsInTopWord));
      // Or with all low bits of the mantissa.
      __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
      __ orr(r0, r3, Operand(r2), SetCC);
      // For equal we already have the right value in r0: return zero (equal)
      // if all bits in the mantissa are zero (it's an Infinity) and non-zero
      // if not (it's a NaN). For <= and >= we need to load r0 with the
      // failing value if it's a NaN.
      if (cc != eq) {
        // All-zero means Infinity means equal.
        __ Ret(eq);
        if (cc == le) {
          __ mov(r0, Operand(GREATER));  // NaN <= NaN should fail.
        } else {
          __ mov(r0, Operand(LESS));     // NaN >= NaN should fail.
        }
      }
      __ Ret();
    }
    // No fall through here.
  }

  __ bind(&not_identical);
}
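
// Bit-pattern examples for the NaN test above (IEEE 754 double, high word
// shown): +Infinity is 0x7ff00000 with an all-zero mantissa, while the
// canonical quiet NaN is 0x7ff80000. Sbfx extracts the 11 exponent bits as
// a signed field, so an all-ones exponent reads back as -1 for both; the
// mantissa test that follows is what separates Infinity (equal) from NaN
// (not equal).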


// See comment at call site.
static void EmitSmiNonsmiComparison(MacroAssembler* masm,
                                    Register lhs,
                                    Register rhs,
                                    Label* lhs_not_nan,
                                    Label* slow,
                                    bool strict) {
  ASSERT((lhs.is(r0) && rhs.is(r1)) ||
         (lhs.is(r1) && rhs.is(r0)));

  Label rhs_is_smi;
  __ tst(rhs, Operand(kSmiTagMask));
  __ b(eq, &rhs_is_smi);

  // Lhs is a Smi. Check whether the rhs is a heap number.
  __ CompareObjectType(rhs, r4, r4, HEAP_NUMBER_TYPE);
  if (strict) {
    // If rhs is not a number and lhs is a Smi then strict equality cannot
    // succeed. Return non-equal.
    // If rhs is r0 then there is already a non-zero value in it.
    if (!rhs.is(r0)) {
      __ mov(r0, Operand(NOT_EQUAL), LeaveCC, ne);
    }
    __ Ret(ne);
  } else {
    // Smi compared non-strictly with a non-Smi non-heap-number. Call
    // the runtime.
    __ b(ne, slow);
  }

  // Lhs is a smi, rhs is a number.
  if (CpuFeatures::IsSupported(VFP3)) {
    // Convert lhs to a double in d7.
    CpuFeatures::Scope scope(VFP3);
    __ SmiToDoubleVFPRegister(lhs, d7, r7, s15);
    // Load the double from rhs, a tagged HeapNumber, to d6.
    __ sub(r7, rhs, Operand(kHeapObjectTag));
    __ vldr(d6, r7, HeapNumber::kValueOffset);
  } else {
    __ push(lr);
    // Convert lhs to a double in r2, r3.
    __ mov(r7, Operand(lhs));
    ConvertToDoubleStub stub1(r3, r2, r7, r6);
    __ Call(stub1.GetCode(), RelocInfo::CODE_TARGET);
    // Load rhs to a double in r0, r1.
    __ Ldrd(r0, r1, FieldMemOperand(rhs, HeapNumber::kValueOffset));
    __ pop(lr);
  }

  // We now have both loaded as doubles but we can skip the lhs nan check
  // since it's a smi.
  __ jmp(lhs_not_nan);

  __ bind(&rhs_is_smi);
  // Rhs is a smi. Check whether the non-smi lhs is a heap number.
  __ CompareObjectType(lhs, r4, r4, HEAP_NUMBER_TYPE);
  if (strict) {
    // If lhs is not a number and rhs is a smi then strict equality cannot
    // succeed. Return non-equal.
    // If lhs is r0 then there is already a non-zero value in it.
    if (!lhs.is(r0)) {
      __ mov(r0, Operand(NOT_EQUAL), LeaveCC, ne);
    }
    __ Ret(ne);
  } else {
    // Smi compared non-strictly with a non-smi non-heap-number. Call
    // the runtime.
    __ b(ne, slow);
  }

  // Rhs is a smi, lhs is a heap number.
  if (CpuFeatures::IsSupported(VFP3)) {
    CpuFeatures::Scope scope(VFP3);
    // Load the double from lhs, a tagged HeapNumber, to d7.
    __ sub(r7, lhs, Operand(kHeapObjectTag));
    __ vldr(d7, r7, HeapNumber::kValueOffset);
    // Convert rhs to a double in d6.
    __ SmiToDoubleVFPRegister(rhs, d6, r7, s13);
  } else {
    __ push(lr);
    // Load lhs to a double in r2, r3.
    __ Ldrd(r2, r3, FieldMemOperand(lhs, HeapNumber::kValueOffset));
    // Convert rhs to a double in r0, r1.
    __ mov(r7, Operand(rhs));
    ConvertToDoubleStub stub2(r1, r0, r7, r6);
    __ Call(stub2.GetCode(), RelocInfo::CODE_TARGET);
    __ pop(lr);
  }
  // Fall through to both_loaded_as_doubles.
}


void EmitNanCheck(MacroAssembler* masm, Label* lhs_not_nan, Condition cc) {
  bool exp_first = (HeapNumber::kExponentOffset == HeapNumber::kValueOffset);
  Register rhs_exponent = exp_first ? r0 : r1;
  Register lhs_exponent = exp_first ? r2 : r3;
  Register rhs_mantissa = exp_first ? r1 : r0;
  Register lhs_mantissa = exp_first ? r3 : r2;
  Label one_is_nan, neither_is_nan;

  __ Sbfx(r4,
          lhs_exponent,
          HeapNumber::kExponentShift,
          HeapNumber::kExponentBits);
  // NaNs have all-one exponents so they sign extend to -1.
  __ cmp(r4, Operand(-1));
  __ b(ne, lhs_not_nan);
  __ mov(r4,
         Operand(lhs_exponent, LSL, HeapNumber::kNonMantissaBitsInTopWord),
         SetCC);
  __ b(ne, &one_is_nan);
  __ cmp(lhs_mantissa, Operand(0));
  __ b(ne, &one_is_nan);

  __ bind(lhs_not_nan);
  __ Sbfx(r4,
          rhs_exponent,
          HeapNumber::kExponentShift,
          HeapNumber::kExponentBits);
  // NaNs have all-one exponents so they sign extend to -1.
  __ cmp(r4, Operand(-1));
  __ b(ne, &neither_is_nan);
  __ mov(r4,
         Operand(rhs_exponent, LSL, HeapNumber::kNonMantissaBitsInTopWord),
         SetCC);
  __ b(ne, &one_is_nan);
  __ cmp(rhs_mantissa, Operand(0));
  __ b(eq, &neither_is_nan);

  __ bind(&one_is_nan);
  // NaN comparisons always fail.
  // Load whatever we need in r0 to make the comparison fail.
  if (cc == lt || cc == le) {
    __ mov(r0, Operand(GREATER));
  } else {
    __ mov(r0, Operand(LESS));
  }
  __ Ret();

  __ bind(&neither_is_nan);
}


// See comment at call site.
static void EmitTwoNonNanDoubleComparison(MacroAssembler* masm, Condition cc) {
  bool exp_first = (HeapNumber::kExponentOffset == HeapNumber::kValueOffset);
  Register rhs_exponent = exp_first ? r0 : r1;
  Register lhs_exponent = exp_first ? r2 : r3;
  Register rhs_mantissa = exp_first ? r1 : r0;
  Register lhs_mantissa = exp_first ? r3 : r2;

  // r0, r1, r2, r3 have the two doubles. Neither is a NaN.
  if (cc == eq) {
    // Doubles are not equal unless they have the same bit pattern.
    // Exception: 0 and -0.
    __ cmp(rhs_mantissa, Operand(lhs_mantissa));
    __ orr(r0, rhs_mantissa, Operand(lhs_mantissa), LeaveCC, ne);
    // Return non-zero if the numbers are unequal.
    __ Ret(ne);

    __ sub(r0, rhs_exponent, Operand(lhs_exponent), SetCC);
    // If the exponents are equal then return 0.
    __ Ret(eq);

    // The exponents are unequal. The only way we can return that the numbers
    // are equal is if one is -0 and the other is 0. We already dealt
    // with the case where both are -0 or both are 0.
    // We start by seeing if the mantissas (that are equal) or the bottom
    // 31 bits of the lhs exponent are non-zero. If so we return not
    // equal.
    __ orr(r4, lhs_mantissa, Operand(lhs_exponent, LSL, kSmiTagSize), SetCC);
    __ mov(r0, Operand(r4), LeaveCC, ne);
    __ Ret(ne);
    // Now they are equal if and only if the rhs exponent is zero in its
    // low 31 bits.
    __ mov(r0, Operand(rhs_exponent, LSL, kSmiTagSize));
    __ Ret();
  } else {
    // Call a native function to do a comparison between two non-NaNs.
    // Call a C routine that may not cause GC or other trouble.
    __ push(lr);
    __ PrepareCallCFunction(4, r5);  // Two doubles count as 4 arguments.
    __ CallCFunction(ExternalReference::compare_doubles(), 4);
    __ pop(pc);  // Return.
  }
}
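
// The zero handling above relies on the IEEE encodings
// +0.0 == 0x00000000:00000000 and -0.0 == 0x80000000:00000000: the mantissa
// words match, the exponent words differ only in the sign bit, and shifting
// that bit out (LSL by one, since kSmiTagSize == 1) makes the two values
// compare equal, which gives the required (0 == -0) == true behaviour.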


// See comment at call site.
static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
                                           Register lhs,
                                           Register rhs) {
  ASSERT((lhs.is(r0) && rhs.is(r1)) ||
         (lhs.is(r1) && rhs.is(r0)));

  // If either operand is a JSObject or an oddball value, then they are
  // not equal since their pointers are different.
  // There is no test for undetectability in strict equality.
  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
  Label first_non_object;
  // Get the type of the first operand into r2 and compare it with
  // FIRST_JS_OBJECT_TYPE.
  __ CompareObjectType(rhs, r2, r2, FIRST_JS_OBJECT_TYPE);
  __ b(lt, &first_non_object);

  // Return non-zero (r0 is not zero).
  Label return_not_equal;
  __ bind(&return_not_equal);
  __ Ret();

  __ bind(&first_non_object);
  // Check for oddballs: true, false, null, undefined.
  __ cmp(r2, Operand(ODDBALL_TYPE));
  __ b(eq, &return_not_equal);

  __ CompareObjectType(lhs, r3, r3, FIRST_JS_OBJECT_TYPE);
  __ b(ge, &return_not_equal);

  // Check for oddballs: true, false, null, undefined.
  __ cmp(r3, Operand(ODDBALL_TYPE));
  __ b(eq, &return_not_equal);

  // Now that we have the types we might as well check for symbol-symbol.
  // Ensure that no non-strings have the symbol bit set.
  ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
  ASSERT(kSymbolTag != 0);
  __ and_(r2, r2, Operand(r3));
  __ tst(r2, Operand(kIsSymbolMask));
  __ b(ne, &return_not_equal);
}


// See comment at call site.
static void EmitCheckForTwoHeapNumbers(MacroAssembler* masm,
                                       Register lhs,
                                       Register rhs,
                                       Label* both_loaded_as_doubles,
                                       Label* not_heap_numbers,
                                       Label* slow) {
  ASSERT((lhs.is(r0) && rhs.is(r1)) ||
         (lhs.is(r1) && rhs.is(r0)));

  __ CompareObjectType(rhs, r3, r2, HEAP_NUMBER_TYPE);
  __ b(ne, not_heap_numbers);
  __ ldr(r2, FieldMemOperand(lhs, HeapObject::kMapOffset));
  __ cmp(r2, r3);
  __ b(ne, slow);  // First was a heap number, second wasn't. Go to slow case.

  // Both are heap numbers. Load them up, then jump to the code we have
  // for that.
  if (CpuFeatures::IsSupported(VFP3)) {
    CpuFeatures::Scope scope(VFP3);
    __ sub(r7, rhs, Operand(kHeapObjectTag));
    __ vldr(d6, r7, HeapNumber::kValueOffset);
    __ sub(r7, lhs, Operand(kHeapObjectTag));
    __ vldr(d7, r7, HeapNumber::kValueOffset);
  } else {
    __ Ldrd(r2, r3, FieldMemOperand(lhs, HeapNumber::kValueOffset));
    __ Ldrd(r0, r1, FieldMemOperand(rhs, HeapNumber::kValueOffset));
  }
  __ jmp(both_loaded_as_doubles);
}


// Fast negative check for symbol-to-symbol equality.
static void EmitCheckForSymbolsOrObjects(MacroAssembler* masm,
                                         Register lhs,
                                         Register rhs,
                                         Label* possible_strings,
                                         Label* not_both_strings) {
  ASSERT((lhs.is(r0) && rhs.is(r1)) ||
         (lhs.is(r1) && rhs.is(r0)));

  // r2 is the object type of rhs.
  // Ensure that no non-strings have the symbol bit set.
  Label object_test;
  ASSERT(kSymbolTag != 0);
  __ tst(r2, Operand(kIsNotStringMask));
  __ b(ne, &object_test);
  __ tst(r2, Operand(kIsSymbolMask));
  __ b(eq, possible_strings);
  __ CompareObjectType(lhs, r3, r3, FIRST_NONSTRING_TYPE);
  __ b(ge, not_both_strings);
  __ tst(r3, Operand(kIsSymbolMask));
  __ b(eq, possible_strings);

  // Both are symbols. We already checked that they weren't the same pointer
  // so they are not equal.
  __ mov(r0, Operand(NOT_EQUAL));
  __ Ret();

  __ bind(&object_test);
  __ cmp(r2, Operand(FIRST_JS_OBJECT_TYPE));
  __ b(lt, not_both_strings);
  __ CompareObjectType(lhs, r2, r3, FIRST_JS_OBJECT_TYPE);
  __ b(lt, not_both_strings);
  // If both objects are undetectable, they are equal. Otherwise, they
  // are not equal, since they are different objects and an object is not
  // equal to undefined.
  __ ldr(r3, FieldMemOperand(rhs, HeapObject::kMapOffset));
  __ ldrb(r2, FieldMemOperand(r2, Map::kBitFieldOffset));
  __ ldrb(r3, FieldMemOperand(r3, Map::kBitFieldOffset));
  __ and_(r0, r2, Operand(r3));
  __ and_(r0, r0, Operand(1 << Map::kIsUndetectable));
  __ eor(r0, r0, Operand(1 << Map::kIsUndetectable));
  __ Ret();
}
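
// The last three instructions above compute, in effect,
//   r0 = ((bit_field(lhs) & bit_field(rhs)) & (1 << Map::kIsUndetectable))
//            ^ (1 << Map::kIsUndetectable)
// so r0 is 0 (EQUAL) only when both maps have the undetectable bit set, and
// non-zero (not equal) for every other combination.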


void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
                                                         Register object,
                                                         Register result,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Register scratch3,
                                                         bool object_is_smi,
                                                         Label* not_found) {
  // Use of registers: register result is used as a temporary.
  Register number_string_cache = result;
  Register mask = scratch3;

  // Load the number string cache.
  __ LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);

  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  __ ldr(mask, FieldMemOperand(number_string_cache, FixedArray::kLengthOffset));
  // Divide length by two (length is a smi).
  __ mov(mask, Operand(mask, ASR, kSmiTagSize + 1));
  __ sub(mask, mask, Operand(1));  // Make mask.

  // Calculate the entry in the number string cache. The hash value in the
  // number string cache for smis is just the smi value, and the hash for
  // doubles is the xor of the upper and lower words. See
  // Heap::GetNumberStringCache.
  Label is_smi;
  Label load_result_from_cache;
  if (!object_is_smi) {
    __ BranchOnSmi(object, &is_smi);
    if (CpuFeatures::IsSupported(VFP3)) {
      CpuFeatures::Scope scope(VFP3);
      __ CheckMap(object,
                  scratch1,
                  Heap::kHeapNumberMapRootIndex,
                  not_found,
                  true);

      ASSERT_EQ(8, kDoubleSize);
      __ add(scratch1,
             object,
             Operand(HeapNumber::kValueOffset - kHeapObjectTag));
      __ ldm(ia, scratch1, scratch1.bit() | scratch2.bit());
      __ eor(scratch1, scratch1, Operand(scratch2));
      __ and_(scratch1, scratch1, Operand(mask));

      // Calculate the address of the entry in the string cache: each entry
      // consists of two pointer sized fields.
      __ add(scratch1,
             number_string_cache,
             Operand(scratch1, LSL, kPointerSizeLog2 + 1));

      Register probe = mask;
      __ ldr(probe,
             FieldMemOperand(scratch1, FixedArray::kHeaderSize));
      __ BranchOnSmi(probe, not_found);
      __ sub(scratch2, object, Operand(kHeapObjectTag));
      __ vldr(d0, scratch2, HeapNumber::kValueOffset);
      __ sub(probe, probe, Operand(kHeapObjectTag));
      __ vldr(d1, probe, HeapNumber::kValueOffset);
      __ vcmp(d0, d1);
      __ vmrs(pc);
      __ b(ne, not_found);  // The cache did not contain this value.
      __ b(&load_result_from_cache);
    } else {
      __ b(not_found);
    }
  }

  __ bind(&is_smi);
  Register scratch = scratch1;
  __ and_(scratch, mask, Operand(object, ASR, 1));
  // Calculate the address of the entry in the string cache: each entry
  // consists of two pointer sized fields.
  __ add(scratch,
         number_string_cache,
         Operand(scratch, LSL, kPointerSizeLog2 + 1));

  // Check if the entry is the smi we are looking for.
  Register probe = mask;
  __ ldr(probe, FieldMemOperand(scratch, FixedArray::kHeaderSize));
  __ cmp(object, probe);
  __ b(ne, not_found);

  // Get the result from the cache.
  __ bind(&load_result_from_cache);
  __ ldr(result,
         FieldMemOperand(scratch, FixedArray::kHeaderSize + kPointerSize));
  __ IncrementCounter(&Counters::number_to_string_native,
                      1,
                      scratch1,
                      scratch2);
}
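
// Hash example for the heap-number path above: the double 1.5 has bit
// pattern 0x3ff8000000000000, so its two words are 0x00000000 and
// 0x3ff80000 and the probe index is (0x00000000 ^ 0x3ff80000) & mask. The
// entry address is then cache + index * 2 * kPointerSize, because each
// entry is a (number, string) pair.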


void NumberToStringStub::Generate(MacroAssembler* masm) {
  Label runtime;

  __ ldr(r1, MemOperand(sp, 0));

  // Generate code to look up the number in the number string cache.
  GenerateLookupNumberStringCache(masm, r1, r0, r2, r3, r4, false, &runtime);
  __ add(sp, sp, Operand(1 * kPointerSize));
  __ Ret();

  __ bind(&runtime);
  // Handle number to string in the runtime system if not found in the cache.
  __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);
}


void RecordWriteStub::Generate(MacroAssembler* masm) {
  __ add(offset_, object_, Operand(offset_));
  __ RecordWriteHelper(object_, offset_, scratch_);
  __ Ret();
}


// On entry lhs_ and rhs_ are the values to be compared.
// On exit r0 is 0, positive or negative to indicate the result of
// the comparison.
void CompareStub::Generate(MacroAssembler* masm) {
  ASSERT((lhs_.is(r0) && rhs_.is(r1)) ||
         (lhs_.is(r1) && rhs_.is(r0)));

  Label slow;  // Call builtin.
  Label not_smis, both_loaded_as_doubles, lhs_not_nan;

  // NOTICE! This code is only reached after a smi-fast-case check, so
  // it is certain that at least one operand isn't a smi.

  // Handle the case where the objects are identical. Either returns the
  // answer or goes to slow. Only falls through if the objects were not
  // identical.
  EmitIdenticalObjectComparison(masm, &slow, cc_, never_nan_nan_);

  // If either is a Smi (we know that not both are), then they can only
  // be strictly equal if the other is a HeapNumber.
  ASSERT_EQ(0, kSmiTag);
  ASSERT_EQ(0, Smi::FromInt(0));
  __ and_(r2, lhs_, Operand(rhs_));
  __ tst(r2, Operand(kSmiTagMask));
  __ b(ne, &not_smis);
  // One operand is a smi. EmitSmiNonsmiComparison generates code that can:
  // 1) Return the answer.
  // 2) Go to slow.
  // 3) Fall through to both_loaded_as_doubles.
  // 4) Jump to lhs_not_nan.
  // In cases 3 and 4 we have found out we were dealing with a number-number
  // comparison. If VFP3 is supported the double values of the numbers have
  // been loaded into d7 and d6. Otherwise, the double values have been loaded
  // into r0, r1, r2, and r3.
  EmitSmiNonsmiComparison(masm, lhs_, rhs_, &lhs_not_nan, &slow, strict_);

  __ bind(&both_loaded_as_doubles);
  // The arguments have been converted to doubles and stored in d6 and d7, if
  // VFP3 is supported, or in r0, r1, r2, and r3.
  if (CpuFeatures::IsSupported(VFP3)) {
    __ bind(&lhs_not_nan);
    CpuFeatures::Scope scope(VFP3);
    // ARMv7 VFP3 instructions to implement double precision comparison.
    __ vcmp(d7, d6);
    __ vmrs(pc);  // Move vector status bits to normal status bits.
    Label nan;
    __ b(vs, &nan);
    __ mov(r0, Operand(EQUAL), LeaveCC, eq);
    __ mov(r0, Operand(LESS), LeaveCC, lt);
    __ mov(r0, Operand(GREATER), LeaveCC, gt);
    __ Ret();

    __ bind(&nan);
    // If one of the sides was a NaN then the v flag is set. Load r0 with
    // whatever it takes to make the comparison fail, since comparisons with
    // NaN always fail.
    if (cc_ == lt || cc_ == le) {
      __ mov(r0, Operand(GREATER));
    } else {
      __ mov(r0, Operand(LESS));
    }
    __ Ret();
  } else {
    // Checks for NaN in the doubles we have loaded. Can return the answer or
    // fall through if neither is a NaN. Also binds lhs_not_nan.
    EmitNanCheck(masm, &lhs_not_nan, cc_);
    // Compares two doubles in r0, r1, r2, r3 that are not NaNs. Returns the
    // answer. Never falls through.
    EmitTwoNonNanDoubleComparison(masm, cc_);
  }

  __ bind(&not_smis);
  // At this point we know we are dealing with two different objects,
  // and neither of them is a Smi. The objects are in rhs_ and lhs_.
  if (strict_) {
    // This returns non-equal for some object types, or falls through if it
    // was not lucky.
    EmitStrictTwoHeapObjectCompare(masm, lhs_, rhs_);
  }

  Label check_for_symbols;
  Label flat_string_check;
  // Check for heap-number-heap-number comparison. Can jump to slow case,
  // or load both doubles into r0, r1, r2, r3 and jump to the code that
  // handles that case. If the inputs are not doubles then jumps to
  // check_for_symbols. In this case r2 will contain the type of rhs_.
  // Never falls through.
  EmitCheckForTwoHeapNumbers(masm,
                             lhs_,
                             rhs_,
                             &both_loaded_as_doubles,
                             &check_for_symbols,
                             &flat_string_check);

  __ bind(&check_for_symbols);
  // In the strict case the EmitStrictTwoHeapObjectCompare already took care
  // of symbols.
  if (cc_ == eq && !strict_) {
    // Returns an answer for two symbols or two detectable objects.
    // Otherwise jumps to the string case or the not-both-strings case.
    // Assumes that r2 is the type of rhs_ on entry.
    EmitCheckForSymbolsOrObjects(masm, lhs_, rhs_, &flat_string_check, &slow);
  }

  // Check for both being sequential ASCII strings, and inline if that is the
  // case.
  __ bind(&flat_string_check);

  __ JumpIfNonSmisNotBothSequentialAsciiStrings(lhs_, rhs_, r2, r3, &slow);

  __ IncrementCounter(&Counters::string_compare_native, 1, r2, r3);
  StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
                                                     lhs_,
                                                     rhs_,
                                                     r2,
                                                     r3,
                                                     r4,
                                                     r5);
  // Never falls through to here.

  __ bind(&slow);

  __ Push(lhs_, rhs_);
  // Figure out which native to call and set up the arguments.
  Builtins::JavaScript native;
  if (cc_ == eq) {
    native = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
  } else {
    native = Builtins::COMPARE;
    int ncr;  // NaN compare result.
    if (cc_ == lt || cc_ == le) {
      ncr = GREATER;
    } else {
      ASSERT(cc_ == gt || cc_ == ge);  // Remaining cases.
      ncr = LESS;
    }
    __ mov(r0, Operand(Smi::FromInt(ncr)));
    __ push(r0);
  }

  // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ InvokeBuiltin(native, JUMP_JS);
}
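
// Example of the ncr ("NaN compare result") fallback above: for cc_ == lt,
// GREATER is pushed, so if either operand is NaN the COMPARE builtin yields
// a result that makes (NaN < x) evaluate to false, matching the requirement
// that every ordered comparison involving NaN is false.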
7621
7622
Steve Blocka7e24c12009-10-30 11:49:00 +00007623// We fall into this code if the operands were Smis, but the result was
7624// not (e.g. overflow). We branch into this code (to the not_smi label) if
7625// the operands were not both Smi. The operands are in r0 and r1. In order
7626// to call the C-implemented binary fp operation routines we need to end up
7627// with the double precision floating point operands in r0 and r1 (for the
7628// value in r1) and r2 and r3 (for the value in r0).
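// (These register pairs are what the soft-float C calling convention uses to
// pass two double arguments.)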
Steve Block6ded16b2010-05-10 14:33:55 +01007629void GenericBinaryOpStub::HandleBinaryOpSlowCases(
7630 MacroAssembler* masm,
7631 Label* not_smi,
7632 Register lhs,
7633 Register rhs,
7634 const Builtins::JavaScript& builtin) {
7635 Label slow, slow_reverse, do_the_call;
7636 bool use_fp_registers = CpuFeatures::IsSupported(VFP3) && Token::MOD != op_;
Steve Blockd0582a62009-12-15 09:54:21 +00007637
Steve Block6ded16b2010-05-10 14:33:55 +01007638 ASSERT((lhs.is(r0) && rhs.is(r1)) || (lhs.is(r1) && rhs.is(r0)));
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01007639 Register heap_number_map = r6;
Steve Block6ded16b2010-05-10 14:33:55 +01007640
7641 if (ShouldGenerateSmiCode()) {
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01007642 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
7643
Steve Block6ded16b2010-05-10 14:33:55 +01007644 // Smi-smi case (overflow).
7645 // Since both are Smis there is no heap number to overwrite, so allocate.
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01007646 // The new heap number is in r5. r3 and r7 are scratch.
7647 __ AllocateHeapNumber(
7648 r5, r3, r7, heap_number_map, lhs.is(r0) ? &slow_reverse : &slow);
Steve Block6ded16b2010-05-10 14:33:55 +01007649
7650 // If we have floating point hardware, inline ADD, SUB, MUL, and DIV,
7651 // using registers d7 and d6 for the double values.
Steve Block8defd9f2010-07-08 12:39:36 +01007652 if (CpuFeatures::IsSupported(VFP3)) {
Steve Block6ded16b2010-05-10 14:33:55 +01007653 CpuFeatures::Scope scope(VFP3);
7654 __ mov(r7, Operand(rhs, ASR, kSmiTagSize));
7655 __ vmov(s15, r7);
7656 __ vcvt_f64_s32(d7, s15);
7657 __ mov(r7, Operand(lhs, ASR, kSmiTagSize));
7658 __ vmov(s13, r7);
7659 __ vcvt_f64_s32(d6, s13);
Steve Block8defd9f2010-07-08 12:39:36 +01007660 if (!use_fp_registers) {
7661 __ vmov(r2, r3, d7);
7662 __ vmov(r0, r1, d6);
7663 }
Steve Block6ded16b2010-05-10 14:33:55 +01007664 } else {
Steve Block8defd9f2010-07-08 12:39:36 +01007665 // Write Smi from rhs to r3 and r2 in double format. r9 is scratch.
Steve Block6ded16b2010-05-10 14:33:55 +01007666 __ mov(r7, Operand(rhs));
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01007667 ConvertToDoubleStub stub1(r3, r2, r7, r9);
Steve Block6ded16b2010-05-10 14:33:55 +01007668 __ push(lr);
7669 __ Call(stub1.GetCode(), RelocInfo::CODE_TARGET);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01007670 // Write Smi from lhs to r1 and r0 in double format. r9 is scratch.
Steve Block6ded16b2010-05-10 14:33:55 +01007671 __ mov(r7, Operand(lhs));
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01007672 ConvertToDoubleStub stub2(r1, r0, r7, r9);
Steve Block6ded16b2010-05-10 14:33:55 +01007673 __ Call(stub2.GetCode(), RelocInfo::CODE_TARGET);
7674 __ pop(lr);
7675 }
7676 __ jmp(&do_the_call); // Tail call. No return.
Steve Blockd0582a62009-12-15 09:54:21 +00007677 }
7678
Steve Block6ded16b2010-05-10 14:33:55 +01007679 // We branch here if at least one of r0 and r1 is not a Smi.
7680 __ bind(not_smi);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01007681 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01007682
7683 // After this point we have the left hand side in r1 and the right hand side
7684 // in r0.
7685 if (lhs.is(r0)) {
7686 __ Swap(r0, r1, ip);
7687 }
7688
Leon Clarkeac952652010-07-15 11:15:24 +01007689 // The type transition also calculates the answer.
7690 bool generate_code_to_calculate_answer = true;
Steve Block6ded16b2010-05-10 14:33:55 +01007691
Leon Clarkeac952652010-07-15 11:15:24 +01007692 if (ShouldGenerateFPCode()) {
Steve Block6ded16b2010-05-10 14:33:55 +01007693 if (runtime_operands_type_ == BinaryOpIC::DEFAULT) {
7694 switch (op_) {
7695 case Token::ADD:
7696 case Token::SUB:
7697 case Token::MUL:
7698 case Token::DIV:
Leon Clarkeac952652010-07-15 11:15:24 +01007699 GenerateTypeTransition(masm); // Tail call.
7700 generate_code_to_calculate_answer = false;
Steve Block6ded16b2010-05-10 14:33:55 +01007701 break;
7702
7703 default:
7704 break;
7705 }
7706 }
7707
Leon Clarkeac952652010-07-15 11:15:24 +01007708 if (generate_code_to_calculate_answer) {
7709 Label r0_is_smi, r1_is_smi, finished_loading_r0, finished_loading_r1;
7710 if (mode_ == NO_OVERWRITE) {
7711 // In the case where there is no chance of an overwritable float we may
7712 // as well do the allocation immediately while r0 and r1 are untouched.
7713 __ AllocateHeapNumber(r5, r3, r7, heap_number_map, &slow);
Steve Block8defd9f2010-07-08 12:39:36 +01007714 }
Steve Block6ded16b2010-05-10 14:33:55 +01007715
Leon Clarkeac952652010-07-15 11:15:24 +01007716 // Move r0 to a double in r2-r3.
7717 __ tst(r0, Operand(kSmiTagMask));
7718 __ b(eq, &r0_is_smi); // It's a Smi, so don't check if it's a heap number.
7719 __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
7720 __ AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
7721 __ cmp(r4, heap_number_map);
7722 __ b(ne, &slow);
7723 if (mode_ == OVERWRITE_RIGHT) {
7724 __ mov(r5, Operand(r0)); // Overwrite this heap number.
Steve Block8defd9f2010-07-08 12:39:36 +01007725 }
Leon Clarkeac952652010-07-15 11:15:24 +01007726 if (use_fp_registers) {
7727 CpuFeatures::Scope scope(VFP3);
7728 // Load the double from tagged HeapNumber r0 to d7.
7729 __ sub(r7, r0, Operand(kHeapObjectTag));
7730 __ vldr(d7, r7, HeapNumber::kValueOffset);
Steve Block6ded16b2010-05-10 14:33:55 +01007731 } else {
Leon Clarkeac952652010-07-15 11:15:24 +01007732 // Calling convention says that second double is in r2 and r3.
7733 __ Ldrd(r2, r3, FieldMemOperand(r0, HeapNumber::kValueOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01007734 }
Leon Clarkeac952652010-07-15 11:15:24 +01007735 __ jmp(&finished_loading_r0);
7736 __ bind(&r0_is_smi);
7737 if (mode_ == OVERWRITE_RIGHT) {
7738 // We can't overwrite a Smi so get address of new heap number into r5.
7739 __ AllocateHeapNumber(r5, r4, r7, heap_number_map, &slow);
7740 }
Steve Block6ded16b2010-05-10 14:33:55 +01007741
Leon Clarkeac952652010-07-15 11:15:24 +01007742 if (CpuFeatures::IsSupported(VFP3)) {
7743 CpuFeatures::Scope scope(VFP3);
7744 // Convert smi in r0 to double in d7.
7745 __ mov(r7, Operand(r0, ASR, kSmiTagSize));
7746 __ vmov(s15, r7);
7747 __ vcvt_f64_s32(d7, s15);
7748 if (!use_fp_registers) {
7749 __ vmov(r2, r3, d7);
7750 }
7751 } else {
7752 // Write Smi from r0 to r3 and r2 in double format.
7753 __ mov(r7, Operand(r0));
7754 ConvertToDoubleStub stub3(r3, r2, r7, r4);
7755 __ push(lr);
7756 __ Call(stub3.GetCode(), RelocInfo::CODE_TARGET);
7757 __ pop(lr);
7758 }
7759
7760 // HEAP_NUMBERS stub is slower than GENERIC on a pair of smis.
7761 // r0 is known to be a smi. If r1 is also a smi then switch to GENERIC.
7762 Label r1_is_not_smi;
7763 if (runtime_operands_type_ == BinaryOpIC::HEAP_NUMBERS) {
7764 __ tst(r1, Operand(kSmiTagMask));
7765 __ b(ne, &r1_is_not_smi);
7766 GenerateTypeTransition(masm); // Tail call.
7767 }
7768
7769 __ bind(&finished_loading_r0);
7770
7771 // Move r1 to a double in r0-r1.
7772 __ tst(r1, Operand(kSmiTagMask));
7773 __ b(eq, &r1_is_smi); // It's a Smi, so don't check if it's a heap number.
7774 __ bind(&r1_is_not_smi);
7775 __ ldr(r4, FieldMemOperand(r1, HeapNumber::kMapOffset));
7776 __ AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
7777 __ cmp(r4, heap_number_map);
7778 __ b(ne, &slow);
7779 if (mode_ == OVERWRITE_LEFT) {
7780 __ mov(r5, Operand(r1)); // Overwrite this heap number.
7781 }
7782 if (use_fp_registers) {
7783 CpuFeatures::Scope scope(VFP3);
7784 // Load the double from tagged HeapNumber r1 to d6.
7785 __ sub(r7, r1, Operand(kHeapObjectTag));
7786 __ vldr(d6, r7, HeapNumber::kValueOffset);
7787 } else {
7788 // Calling convention says that first double is in r0 and r1.
7789 __ Ldrd(r0, r1, FieldMemOperand(r1, HeapNumber::kValueOffset));
7790 }
7791 __ jmp(&finished_loading_r1);
7792 __ bind(&r1_is_smi);
7793 if (mode_ == OVERWRITE_LEFT) {
7794 // We can't overwrite a Smi so get address of new heap number into r5.
7795 __ AllocateHeapNumber(r5, r4, r7, heap_number_map, &slow);
7796 }
7797
7798 if (CpuFeatures::IsSupported(VFP3)) {
7799 CpuFeatures::Scope scope(VFP3);
7800 // Convert smi in r1 to double in d6.
7801 __ mov(r7, Operand(r1, ASR, kSmiTagSize));
7802 __ vmov(s13, r7);
7803 __ vcvt_f64_s32(d6, s13);
7804 if (!use_fp_registers) {
7805 __ vmov(r0, r1, d6);
7806 }
7807 } else {
7808 // Write Smi from r1 to r1 and r0 in double format.
7809 __ mov(r7, Operand(r1));
7810 ConvertToDoubleStub stub4(r1, r0, r7, r9);
7811 __ push(lr);
7812 __ Call(stub4.GetCode(), RelocInfo::CODE_TARGET);
7813 __ pop(lr);
7814 }
7815
7816 __ bind(&finished_loading_r1);
Steve Block6ded16b2010-05-10 14:33:55 +01007817 }
Leon Clarkeac952652010-07-15 11:15:24 +01007818
7819 if (generate_code_to_calculate_answer || do_the_call.is_linked()) {
7820 __ bind(&do_the_call);
7821 // If we are inlining the operation using VFP3 instructions for
7822 // add, subtract, multiply, or divide, the arguments are in d6 and d7.
7823 if (use_fp_registers) {
7824 CpuFeatures::Scope scope(VFP3);
7825 // ARMv7 VFP3 instructions to implement
7826 // double precision, add, subtract, multiply, divide.
7827
7828 if (Token::MUL == op_) {
7829 __ vmul(d5, d6, d7);
7830 } else if (Token::DIV == op_) {
7831 __ vdiv(d5, d6, d7);
7832 } else if (Token::ADD == op_) {
7833 __ vadd(d5, d6, d7);
7834 } else if (Token::SUB == op_) {
7835 __ vsub(d5, d6, d7);
7836 } else {
7837 UNREACHABLE();
7838 }
7839 __ sub(r0, r5, Operand(kHeapObjectTag));
7840 __ vstr(d5, r0, HeapNumber::kValueOffset);
7841 __ add(r0, r0, Operand(kHeapObjectTag));
7842 __ mov(pc, lr);
7843 } else {
7844 // If we did not inline the operation, then the arguments are in:
7845 // r0: Left value (least significant part of mantissa).
7846 // r1: Left value (sign, exponent, top of mantissa).
7847 // r2: Right value (least significant part of mantissa).
7848 // r3: Right value (sign, exponent, top of mantissa).
7849 // r5: Address of heap number for result.
7850
7851 __ push(lr); // For later.
7852 __ PrepareCallCFunction(4, r4); // Two doubles count as 4 arguments.
7853 // Call C routine that may not cause GC or other trouble. r5 is callee
7854 // save.
7855 __ CallCFunction(ExternalReference::double_fp_operation(op_), 4);
7856 // Store answer in the overwritable heap number.
7857 #if !defined(USE_ARM_EABI)
7858 // Double returned in fp coprocessor register 0 and 1, encoded as
7859 // register cr8. Offsets must be divisible by 4 for coprocessor so we
7860 // need to subtract the tag from r5.
7861 __ sub(r4, r5, Operand(kHeapObjectTag));
7862 __ stc(p1, cr8, MemOperand(r4, HeapNumber::kValueOffset));
7863 #else
7864 // Double returned in registers 0 and 1.
7865 __ Strd(r0, r1, FieldMemOperand(r5, HeapNumber::kValueOffset));
7866 #endif
7867 __ mov(r0, Operand(r5));
7868 // And we are done.
7869 __ pop(pc);
7870 }
7871 }
7872 }
7873
7874 if (!generate_code_to_calculate_answer &&
7875 !slow_reverse.is_linked() &&
7876 !slow.is_linked()) {
7877 return;
Steve Block6ded16b2010-05-10 14:33:55 +01007878 }
7879
Steve Block6ded16b2010-05-10 14:33:55 +01007880 if (lhs.is(r0)) {
7881 __ b(&slow);
7882 __ bind(&slow_reverse);
7883 __ Swap(r0, r1, ip);
7884 }
Steve Blocka7e24c12009-10-30 11:49:00 +00007885
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01007886 heap_number_map = no_reg; // Don't use this any more from here on.
7887
Steve Blocka7e24c12009-10-30 11:49:00 +00007888 // We jump to here if something goes wrong (one param is not a number of any
7889 // sort or new-space allocation fails).
7890 __ bind(&slow);
Steve Blockd0582a62009-12-15 09:54:21 +00007891
7892 // Push arguments to the stack
Steve Block6ded16b2010-05-10 14:33:55 +01007893 __ Push(r1, r0);
Steve Blockd0582a62009-12-15 09:54:21 +00007894
Steve Block6ded16b2010-05-10 14:33:55 +01007895 if (Token::ADD == op_) {
Steve Blockd0582a62009-12-15 09:54:21 +00007896 // Test for string arguments before calling runtime.
7897 // r1 : first argument
7898 // r0 : second argument
7899 // sp[0] : second argument
Andrei Popescu31002712010-02-23 13:46:05 +00007900 // sp[4] : first argument
Steve Blockd0582a62009-12-15 09:54:21 +00007901
Steve Block6ded16b2010-05-10 14:33:55 +01007902 Label not_strings, not_string1, string1, string1_smi2;
Steve Blockd0582a62009-12-15 09:54:21 +00007903 __ tst(r1, Operand(kSmiTagMask));
7904 __ b(eq, &not_string1);
7905 __ CompareObjectType(r1, r2, r2, FIRST_NONSTRING_TYPE);
7906 __ b(ge, &not_string1);
7907
7908 // First argument is a string, test second.
7909 __ tst(r0, Operand(kSmiTagMask));
Steve Block6ded16b2010-05-10 14:33:55 +01007910 __ b(eq, &string1_smi2);
Steve Blockd0582a62009-12-15 09:54:21 +00007911 __ CompareObjectType(r0, r2, r2, FIRST_NONSTRING_TYPE);
7912 __ b(ge, &string1);
7913
7914 // First and second argument are strings.
Steve Block6ded16b2010-05-10 14:33:55 +01007915 StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
7916 __ TailCallStub(&string_add_stub);
7917
7918 __ bind(&string1_smi2);
7919 // First argument is a string, second is a smi. Try to look up the number
7920 // string for the smi in the number string cache.
7921 NumberToStringStub::GenerateLookupNumberStringCache(
7922 masm, r0, r2, r4, r5, r6, true, &string1);
7923
7924 // Replace second argument on stack and tailcall string add stub to make
7925 // the result.
7926 __ str(r2, MemOperand(sp, 0));
7927 __ TailCallStub(&string_add_stub);
Steve Blockd0582a62009-12-15 09:54:21 +00007928
7929 // Only first argument is a string.
7930 __ bind(&string1);
7931 __ InvokeBuiltin(Builtins::STRING_ADD_LEFT, JUMP_JS);
7932
7933 // First argument was not a string, test second.
7934 __ bind(&not_string1);
7935 __ tst(r0, Operand(kSmiTagMask));
7936 __ b(eq, &not_strings);
7937 __ CompareObjectType(r0, r2, r2, FIRST_NONSTRING_TYPE);
7938 __ b(ge, &not_strings);
7939
7940 // Only second argument is a string.
Steve Blockd0582a62009-12-15 09:54:21 +00007941 __ InvokeBuiltin(Builtins::STRING_ADD_RIGHT, JUMP_JS);
7942
7943 __ bind(&not_strings);
7944 }
7945
Steve Blocka7e24c12009-10-30 11:49:00 +00007946 __ InvokeBuiltin(builtin, JUMP_JS); // Tail call. No return.
Steve Blocka7e24c12009-10-30 11:49:00 +00007947}
7948
7949
7950// Tries to get a signed int32 out of a double precision floating point heap
7951// number. Rounds towards 0. Fastest for doubles that are in the ranges
7952// -0x7fffffff to -0x40000000 or 0x40000000 to 0x7fffffff. This corresponds
7953// almost to the range of signed int32 values that are not Smis. Jumps to the
7954// label 'slow' if the double isn't in the range -0x80000000.0 to 0x80000000.0
7955// (excluding the endpoints).
7956static void GetInt32(MacroAssembler* masm,
7957 Register source,
7958 Register dest,
7959 Register scratch,
7960 Register scratch2,
7961 Label* slow) {
7962 Label right_exponent, done;
7963 // Get exponent word.
7964 __ ldr(scratch, FieldMemOperand(source, HeapNumber::kExponentOffset));
7965 // Get exponent alone in scratch2.
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01007966 __ Ubfx(scratch2,
7967 scratch,
7968 HeapNumber::kExponentShift,
7969 HeapNumber::kExponentBits);
Steve Blocka7e24c12009-10-30 11:49:00 +00007970 // Load dest with zero. We use this either for the final shift or
7971 // for the answer.
7972 __ mov(dest, Operand(0));
7973 // Check whether the exponent matches a 32 bit signed int that is not a Smi.
7974 // A non-Smi integer is 1.xxx * 2^30 so the exponent is 30 (biased). This is
7975 // the exponent that we are fastest at and also the highest exponent we can
7976 // handle here.
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01007977 const uint32_t non_smi_exponent = HeapNumber::kExponentBias + 30;
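  // With the standard IEEE-754 exponent bias of 1023 this is 1053, i.e. 0x41d.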
7978 // The non_smi_exponent, 0x41d, is too big for ARM's immediate field so we
7979 // split it up to avoid a constant pool entry. You can't do that in general
7980 // for cmp because of the overflow flag, but we know the exponent is in the
7981 // range 0-2047 so there is no overflow.
7982 int fudge_factor = 0x400;
7983 __ sub(scratch2, scratch2, Operand(fudge_factor));
7984 __ cmp(scratch2, Operand(non_smi_exponent - fudge_factor));
Steve Blocka7e24c12009-10-30 11:49:00 +00007985 // If we have a match of the int32-but-not-Smi exponent then skip some logic.
7986 __ b(eq, &right_exponent);
7987 // If the exponent is higher than that then go to slow case. This catches
7988 // numbers that don't fit in a signed int32, infinities and NaNs.
7989 __ b(gt, slow);
7990
7991 // We know the exponent is smaller than 30 (biased). If it is less than
7992 // 0 (biased) then the number is smaller in magnitude than 1.0 * 2^0, i.e.
7993 // it rounds to zero.
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01007994 const uint32_t zero_exponent = HeapNumber::kExponentBias + 0;
7995 __ sub(scratch2, scratch2, Operand(zero_exponent - fudge_factor), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00007996 // Dest already has a Smi zero.
7997 __ b(lt, &done);
Steve Blockd0582a62009-12-15 09:54:21 +00007998 if (!CpuFeatures::IsSupported(VFP3)) {
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01007999 // We have an exponent between 0 and 30 in scratch2. Subtract from 30 to
8000 // get how much to shift down.
8001 __ rsb(dest, scratch2, Operand(30));
Steve Blockd0582a62009-12-15 09:54:21 +00008002 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008003 __ bind(&right_exponent);
Steve Blockd0582a62009-12-15 09:54:21 +00008004 if (CpuFeatures::IsSupported(VFP3)) {
8005 CpuFeatures::Scope scope(VFP3);
8006 // ARMv7 VFP3 instructions implementing double precision to integer
8007 // conversion using round to zero.
8008 __ ldr(scratch2, FieldMemOperand(source, HeapNumber::kMantissaOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00008009 __ vmov(d7, scratch2, scratch);
Steve Block6ded16b2010-05-10 14:33:55 +01008010 __ vcvt_s32_f64(s15, d7);
Leon Clarkee46be812010-01-19 14:06:41 +00008011 __ vmov(dest, s15);
Steve Blockd0582a62009-12-15 09:54:21 +00008012 } else {
8013 // Get the top bits of the mantissa.
8014 __ and_(scratch2, scratch, Operand(HeapNumber::kMantissaMask));
8015 // Put back the implicit 1.
8016 __ orr(scratch2, scratch2, Operand(1 << HeapNumber::kExponentShift));
8017 // Shift up the mantissa bits to take up the space the exponent used to
8018 // take. We just orred in the implicit bit so that took care of one and
8019 // we want to leave the sign bit 0 so we subtract 2 bits from the shift
8020 // distance.
8021 const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2;
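    // The top word has 12 non-mantissa bits (sign plus 11 exponent bits), so
    // the shift distance is 10, matching the 22-bit shift of the low word
    // below.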
8022 __ mov(scratch2, Operand(scratch2, LSL, shift_distance));
8023 // Put sign in zero flag.
8024 __ tst(scratch, Operand(HeapNumber::kSignMask));
8025 // Get the second half of the double. For some exponents we don't
8026 // actually need this because the bits get shifted out again, but
8027 // it's probably slower to test than just to do it.
8028 __ ldr(scratch, FieldMemOperand(source, HeapNumber::kMantissaOffset));
8029 // Shift down 22 bits to get the last 10 bits.
8030 __ orr(scratch, scratch2, Operand(scratch, LSR, 32 - shift_distance));
8031 // Move down according to the exponent.
8032 __ mov(dest, Operand(scratch, LSR, dest));
8033 // Fix sign if sign bit was set.
8034 __ rsb(dest, dest, Operand(0), LeaveCC, ne);
8035 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008036 __ bind(&done);
8037}
8038
Steve Blocka7e24c12009-10-30 11:49:00 +00008039// For bitwise ops where the inputs are not both Smis, we try to determine
8040// whether both inputs are either Smis or at least heap numbers that can be
8041// represented by a 32 bit signed value. We truncate towards zero as required
8042// by the ES spec. If this is the case we do the bitwise op and see if the
8043// result is a Smi. If so, great, otherwise we try to find a heap number to
8044// write the answer into (either by allocating or by overwriting).
Steve Block6ded16b2010-05-10 14:33:55 +01008045// On entry the operands are in lhs and rhs. On exit the answer is in r0.
8046void GenericBinaryOpStub::HandleNonSmiBitwiseOp(MacroAssembler* masm,
8047 Register lhs,
8048 Register rhs) {
Steve Blocka7e24c12009-10-30 11:49:00 +00008049 Label slow, result_not_a_smi;
Steve Block6ded16b2010-05-10 14:33:55 +01008050 Label rhs_is_smi, lhs_is_smi;
8051 Label done_checking_rhs, done_checking_lhs;
Steve Blocka7e24c12009-10-30 11:49:00 +00008052
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01008053 Register heap_number_map = r6;
8054 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
8055
Steve Block6ded16b2010-05-10 14:33:55 +01008056 __ tst(lhs, Operand(kSmiTagMask));
8057 __ b(eq, &lhs_is_smi); // It's a Smi, so don't check if it's a heap number.
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01008058 __ ldr(r4, FieldMemOperand(lhs, HeapNumber::kMapOffset));
8059 __ cmp(r4, heap_number_map);
Steve Blocka7e24c12009-10-30 11:49:00 +00008060 __ b(ne, &slow);
Steve Block6ded16b2010-05-10 14:33:55 +01008061 GetInt32(masm, lhs, r3, r5, r4, &slow);
8062 __ jmp(&done_checking_lhs);
8063 __ bind(&lhs_is_smi);
8064 __ mov(r3, Operand(lhs, ASR, 1));
8065 __ bind(&done_checking_lhs);
Steve Blocka7e24c12009-10-30 11:49:00 +00008066
Steve Block6ded16b2010-05-10 14:33:55 +01008067 __ tst(rhs, Operand(kSmiTagMask));
8068 __ b(eq, &rhs_is_smi); // It's a Smi, so don't check if it's a heap number.
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01008069 __ ldr(r4, FieldMemOperand(rhs, HeapNumber::kMapOffset));
8070 __ cmp(r4, heap_number_map);
Steve Blocka7e24c12009-10-30 11:49:00 +00008071 __ b(ne, &slow);
Steve Block6ded16b2010-05-10 14:33:55 +01008072 GetInt32(masm, rhs, r2, r5, r4, &slow);
8073 __ jmp(&done_checking_rhs);
8074 __ bind(&rhs_is_smi);
8075 __ mov(r2, Operand(rhs, ASR, 1));
8076 __ bind(&done_checking_rhs);
8077
8078 ASSERT(((lhs.is(r0) && rhs.is(r1)) || (lhs.is(r1) && rhs.is(r0))));
Steve Blocka7e24c12009-10-30 11:49:00 +00008079
8080 // r0 and r1: Original operands (Smi or heap numbers).
8081 // r2 and r3: Signed int32 operands.
8082 switch (op_) {
8083 case Token::BIT_OR: __ orr(r2, r2, Operand(r3)); break;
8084 case Token::BIT_XOR: __ eor(r2, r2, Operand(r3)); break;
8085 case Token::BIT_AND: __ and_(r2, r2, Operand(r3)); break;
8086 case Token::SAR:
8087 // Use only the 5 least significant bits of the shift count.
8088 __ and_(r2, r2, Operand(0x1f));
8089 __ mov(r2, Operand(r3, ASR, r2));
8090 break;
8091 case Token::SHR:
8092 // Use only the 5 least significant bits of the shift count.
8093 __ and_(r2, r2, Operand(0x1f));
8094 __ mov(r2, Operand(r3, LSR, r2), SetCC);
8095 // SHR is special because it is required to produce a positive answer.
8096 // The code below for writing into heap numbers isn't capable of writing
8097 // the register as an unsigned int so we go to slow case if we hit this
8098 // case.
Steve Block8defd9f2010-07-08 12:39:36 +01008099 if (CpuFeatures::IsSupported(VFP3)) {
8100 __ b(mi, &result_not_a_smi);
8101 } else {
8102 __ b(mi, &slow);
8103 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008104 break;
8105 case Token::SHL:
8106 // Use only the 5 least significant bits of the shift count.
8107 __ and_(r2, r2, Operand(0x1f));
8108 __ mov(r2, Operand(r3, LSL, r2));
8109 break;
8110 default: UNREACHABLE();
8111 }
8112 // Check that the *signed* result fits in a Smi.
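  // Adding 0x40000000 sets the sign bit exactly when r2 lies outside the Smi
  // range [-2^30, 2^30 - 1].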
8113 __ add(r3, r2, Operand(0x40000000), SetCC);
8114 __ b(mi, &result_not_a_smi);
8115 __ mov(r0, Operand(r2, LSL, kSmiTagSize));
8116 __ Ret();
8117
8118 Label have_to_allocate, got_a_heap_number;
8119 __ bind(&result_not_a_smi);
8120 switch (mode_) {
8121 case OVERWRITE_RIGHT: {
Steve Block6ded16b2010-05-10 14:33:55 +01008122 __ tst(rhs, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00008123 __ b(eq, &have_to_allocate);
Steve Block6ded16b2010-05-10 14:33:55 +01008124 __ mov(r5, Operand(rhs));
Steve Blocka7e24c12009-10-30 11:49:00 +00008125 break;
8126 }
8127 case OVERWRITE_LEFT: {
Steve Block6ded16b2010-05-10 14:33:55 +01008128 __ tst(lhs, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00008129 __ b(eq, &have_to_allocate);
Steve Block6ded16b2010-05-10 14:33:55 +01008130 __ mov(r5, Operand(lhs));
Steve Blocka7e24c12009-10-30 11:49:00 +00008131 break;
8132 }
8133 case NO_OVERWRITE: {
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01008134 // Get a new heap number in r5. r4 and r7 are scratch.
8135 __ AllocateHeapNumber(r5, r4, r7, heap_number_map, &slow);
Steve Blocka7e24c12009-10-30 11:49:00 +00008136 }
8137 default: break;
8138 }
8139 __ bind(&got_a_heap_number);
8140 // r2: Answer as signed int32.
8141 // r5: Heap number to write answer into.
8142
8143 // Nothing can go wrong now, so move the heap number to r0, which is the
8144 // result.
8145 __ mov(r0, Operand(r5));
8146
Steve Block8defd9f2010-07-08 12:39:36 +01008147 if (CpuFeatures::IsSupported(VFP3)) {
8148 // Convert the int32 in r2 to the heap number in r0. r3 is corrupted.
8149 CpuFeatures::Scope scope(VFP3);
8150 __ vmov(s0, r2);
8151 if (op_ == Token::SHR) {
8152 __ vcvt_f64_u32(d0, s0);
8153 } else {
8154 __ vcvt_f64_s32(d0, s0);
8155 }
8156 __ sub(r3, r0, Operand(kHeapObjectTag));
8157 __ vstr(d0, r3, HeapNumber::kValueOffset);
8158 __ Ret();
8159 } else {
8160 // Tail call that writes the int32 in r2 to the heap number in r0, using
8161 // r3 as scratch. r0 is preserved and returned.
8162 WriteInt32ToHeapNumberStub stub(r2, r0, r3);
8163 __ TailCallStub(&stub);
8164 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008165
8166 if (mode_ != NO_OVERWRITE) {
8167 __ bind(&have_to_allocate);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01008168 // Get a new heap number in r5. r4 and r7 are scratch.
8169 __ AllocateHeapNumber(r5, r4, r7, heap_number_map, &slow);
Steve Blocka7e24c12009-10-30 11:49:00 +00008170 __ jmp(&got_a_heap_number);
8171 }
8172
8173 // If all else failed then we go to the runtime system.
8174 __ bind(&slow);
Steve Block6ded16b2010-05-10 14:33:55 +01008175 __ Push(lhs, rhs); // Restore stack.
Steve Blocka7e24c12009-10-30 11:49:00 +00008176 switch (op_) {
8177 case Token::BIT_OR:
8178 __ InvokeBuiltin(Builtins::BIT_OR, JUMP_JS);
8179 break;
8180 case Token::BIT_AND:
8181 __ InvokeBuiltin(Builtins::BIT_AND, JUMP_JS);
8182 break;
8183 case Token::BIT_XOR:
8184 __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_JS);
8185 break;
8186 case Token::SAR:
8187 __ InvokeBuiltin(Builtins::SAR, JUMP_JS);
8188 break;
8189 case Token::SHR:
8190 __ InvokeBuiltin(Builtins::SHR, JUMP_JS);
8191 break;
8192 case Token::SHL:
8193 __ InvokeBuiltin(Builtins::SHL, JUMP_JS);
8194 break;
8195 default:
8196 UNREACHABLE();
8197 }
8198}
8199
8200
8201// Can we multiply by x with at most two shifts and an add?
8202// This answers yes to all integers from 2 to 10.
8203static bool IsEasyToMultiplyBy(int x) {
8204 if (x < 2) return false; // Avoid special cases.
8205 if (x > (Smi::kMaxValue + 1) >> 2) return false; // Almost always overflows.
8206 if (IsPowerOf2(x)) return true; // Simple shift.
8207 if (PopCountLessThanEqual2(x)) return true; // Shift and add and shift.
8208 if (IsPowerOf2(x + 1)) return true; // Patterns like 11111.
8209 return false;
8210}
8211
8212
8213// Can multiply by anything that IsEasyToMultiplyBy returns true for.
8214// Source and destination may be the same register. This routine does
8215// not set carry and overflow the way a mul instruction would.
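// E.g. 10 == b1010 has two bits set, so x * 10 is computed as
// (x + (x << 2)) << 1.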
8216static void MultiplyByKnownInt(MacroAssembler* masm,
8217 Register source,
8218 Register destination,
8219 int known_int) {
8220 if (IsPowerOf2(known_int)) {
8221 __ mov(destination, Operand(source, LSL, BitPosition(known_int)));
8222 } else if (PopCountLessThanEqual2(known_int)) {
8223 int first_bit = BitPosition(known_int);
8224 int second_bit = BitPosition(known_int ^ (1 << first_bit));
8225 __ add(destination, source, Operand(source, LSL, second_bit - first_bit));
8226 if (first_bit != 0) {
8227 __ mov(destination, Operand(destination, LSL, first_bit));
8228 }
8229 } else {
8230 ASSERT(IsPowerOf2(known_int + 1)); // Patterns like 1111.
8231 int the_bit = BitPosition(known_int + 1);
8232 __ rsb(destination, source, Operand(source, LSL, the_bit));
8233 }
8234}
8235
8236
8237// This function (as opposed to MultiplyByKnownInt) takes the known int in
8238// a register for the cases where it doesn't know a good trick, and may deliver
8239// a result that needs shifting.
8240static void MultiplyByKnownInt2(
8241 MacroAssembler* masm,
8242 Register result,
8243 Register source,
8244 Register known_int_register, // Smi tagged.
8245 int known_int,
8246 int* required_shift) { // Including Smi tag shift
8247 switch (known_int) {
8248 case 3:
8249 __ add(result, source, Operand(source, LSL, 1));
8250 *required_shift = 1;
8251 break;
8252 case 5:
8253 __ add(result, source, Operand(source, LSL, 2));
8254 *required_shift = 1;
8255 break;
8256 case 6:
8257 __ add(result, source, Operand(source, LSL, 1));
8258 *required_shift = 2;
8259 break;
8260 case 7:
8261 __ rsb(result, source, Operand(source, LSL, 3));
8262 *required_shift = 1;
8263 break;
8264 case 9:
8265 __ add(result, source, Operand(source, LSL, 3));
8266 *required_shift = 1;
8267 break;
8268 case 10:
8269 __ add(result, source, Operand(source, LSL, 2));
8270 *required_shift = 2;
8271 break;
8272 default:
8273 ASSERT(!IsPowerOf2(known_int)); // That would be very inefficient.
8274 __ mul(result, source, known_int_register);
8275 *required_shift = 0;
8276 }
8277}
8278
8279
Steve Block8defd9f2010-07-08 12:39:36 +01008280// This uses versions of the sum-of-digits-to-see-if-a-number-is-divisible-by-3
8281// trick. See http://en.wikipedia.org/wiki/Divisibility_rule
8282// Takes the sum of the digits base (mask + 1) repeatedly until we have a
8283// number from 0 to mask. On exit the 'eq' condition flags are set if the
8284// answer is exactly the mask.
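// E.g. with mask == 3 and shift == 2 (digits base 4), 27 reduces as
// 27 -> 3 + 6 == 9 -> 1 + 2 == 3, so 'eq' is set, as expected for a multiple
// of 3.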
8285void IntegerModStub::DigitSum(MacroAssembler* masm,
8286 Register lhs,
8287 int mask,
8288 int shift,
8289 Label* entry) {
8290 ASSERT(mask > 0);
8291 ASSERT(mask <= 0xff); // The mask fits in an immediate, so we don't need ip to hold it.
8292 Label loop;
8293 __ bind(&loop);
8294 __ and_(ip, lhs, Operand(mask));
8295 __ add(lhs, ip, Operand(lhs, LSR, shift));
8296 __ bind(entry);
8297 __ cmp(lhs, Operand(mask));
8298 __ b(gt, &loop);
8299}
8300
8301
8302void IntegerModStub::DigitSum(MacroAssembler* masm,
8303 Register lhs,
8304 Register scratch,
8305 int mask,
8306 int shift1,
8307 int shift2,
8308 Label* entry) {
8309 ASSERT(mask > 0);
8310 ASSERT(mask <= 0xff); // The mask fits in an immediate, so we don't need ip to hold it.
8311 Label loop;
8312 __ bind(&loop);
8313 __ bic(scratch, lhs, Operand(mask));
8314 __ and_(ip, lhs, Operand(mask));
8315 __ add(lhs, ip, Operand(lhs, LSR, shift1));
8316 __ add(lhs, lhs, Operand(scratch, LSR, shift2));
8317 __ bind(entry);
8318 __ cmp(lhs, Operand(mask));
8319 __ b(gt, &loop);
8320}
8321
8322
8323// Splits the number into two halves (bottom half has shift bits). The top
8324// half is subtracted from the bottom half. If the result is negative then
8325// rhs is added.
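// This preserves the value modulo rhs because 2^shift == -1 (mod rhs) for the
// (shift, rhs) pairs used here: (2, 5), (3, 9) and (4, 17).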
8326void IntegerModStub::ModGetInRangeBySubtraction(MacroAssembler* masm,
8327 Register lhs,
8328 int shift,
8329 int rhs) {
8330 int mask = (1 << shift) - 1;
8331 __ and_(ip, lhs, Operand(mask));
8332 __ sub(lhs, ip, Operand(lhs, LSR, shift), SetCC);
8333 __ add(lhs, lhs, Operand(rhs), LeaveCC, mi);
8334}
8335
8336
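// Reduces lhs, assumed to be at most max, modulo denominator by repeatedly
// subtracting denominator * 2^k for decreasing k.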
8337void IntegerModStub::ModReduce(MacroAssembler* masm,
8338 Register lhs,
8339 int max,
8340 int denominator) {
8341 int limit = denominator;
8342 while (limit * 2 <= max) limit *= 2;
8343 while (limit >= denominator) {
8344 __ cmp(lhs, Operand(limit));
8345 __ sub(lhs, lhs, Operand(limit), LeaveCC, ge);
8346 limit >>= 1;
8347 }
8348}
8349
8350
8351void IntegerModStub::ModAnswer(MacroAssembler* masm,
8352 Register result,
8353 Register shift_distance,
8354 Register mask_bits,
8355 Register sum_of_digits) {
8356 __ add(result, mask_bits, Operand(sum_of_digits, LSL, shift_distance));
8357 __ Ret();
8358}
8359
8360
8361// See comment for class.
8362void IntegerModStub::Generate(MacroAssembler* masm) {
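  // Strip the power-of-2 factor (and the Smi tag) from the lhs; the caller
  // left those low bits in mask_bits_ and ModAnswer adds them back in.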
8363 __ mov(lhs_, Operand(lhs_, LSR, shift_distance_));
8364 __ bic(odd_number_, odd_number_, Operand(1));
8365 __ mov(odd_number_, Operand(odd_number_, LSL, 1));
8366 // We now have (odd_number_ - 1) * 2 in the register.
8367 // Build a switch out of branches instead of data because it avoids
8368 // having to teach the assembler about intra-code-object pointers
8369 // that are not in relative branch instructions.
8370 Label mod3, mod5, mod7, mod9, mod11, mod13, mod15, mod17, mod19;
8371 Label mod21, mod23, mod25;
8372 { Assembler::BlockConstPoolScope block_const_pool(masm);
8373 __ add(pc, pc, Operand(odd_number_));
8374 // When you read pc it is always 8 ahead, but when you write it you always
8375 // write the actual value. So we put in two nops to take up the slack.
8376 __ nop();
8377 __ nop();
8378 __ b(&mod3);
8379 __ b(&mod5);
8380 __ b(&mod7);
8381 __ b(&mod9);
8382 __ b(&mod11);
8383 __ b(&mod13);
8384 __ b(&mod15);
8385 __ b(&mod17);
8386 __ b(&mod19);
8387 __ b(&mod21);
8388 __ b(&mod23);
8389 __ b(&mod25);
8390 }
8391
8392 // For each denominator we find a multiple that is almost only ones
8393 // when expressed in binary. Then we do the sum-of-digits trick for
8394 // that number. If the multiple is not 1 then we have to do a little
8395 // more work afterwards to get the answer into the 0 to denominator-1
8396 // range.
8397 DigitSum(masm, lhs_, 3, 2, &mod3); // 3 = b11.
8398 __ sub(lhs_, lhs_, Operand(3), LeaveCC, eq);
8399 ModAnswer(masm, result_, shift_distance_, mask_bits_, lhs_);
8400
8401 DigitSum(masm, lhs_, 0xf, 4, &mod5); // 5 * 3 = b1111.
8402 ModGetInRangeBySubtraction(masm, lhs_, 2, 5);
8403 ModAnswer(masm, result_, shift_distance_, mask_bits_, lhs_);
8404
8405 DigitSum(masm, lhs_, 7, 3, &mod7); // 7 = b111.
8406 __ sub(lhs_, lhs_, Operand(7), LeaveCC, eq);
8407 ModAnswer(masm, result_, shift_distance_, mask_bits_, lhs_);
8408
8409 DigitSum(masm, lhs_, 0x3f, 6, &mod9); // 7 * 9 = b111111.
8410 ModGetInRangeBySubtraction(masm, lhs_, 3, 9);
8411 ModAnswer(masm, result_, shift_distance_, mask_bits_, lhs_);
8412
8413 DigitSum(masm, lhs_, r5, 0x3f, 6, 3, &mod11); // 5 * 11 = b110111.
8414 ModReduce(masm, lhs_, 0x3f, 11);
8415 ModAnswer(masm, result_, shift_distance_, mask_bits_, lhs_);
8416
8417 DigitSum(masm, lhs_, r5, 0xff, 8, 5, &mod13); // 19 * 13 = b11110111.
8418 ModReduce(masm, lhs_, 0xff, 13);
8419 ModAnswer(masm, result_, shift_distance_, mask_bits_, lhs_);
8420
8421 DigitSum(masm, lhs_, 0xf, 4, &mod15); // 15 = b1111.
8422 __ sub(lhs_, lhs_, Operand(15), LeaveCC, eq);
8423 ModAnswer(masm, result_, shift_distance_, mask_bits_, lhs_);
8424
8425 DigitSum(masm, lhs_, 0xff, 8, &mod17); // 15 * 17 = b11111111.
8426 ModGetInRangeBySubtraction(masm, lhs_, 4, 17);
8427 ModAnswer(masm, result_, shift_distance_, mask_bits_, lhs_);
8428
8429 DigitSum(masm, lhs_, r5, 0xff, 8, 5, &mod19); // 13 * 19 = b11110111.
8430 ModReduce(masm, lhs_, 0xff, 19);
8431 ModAnswer(masm, result_, shift_distance_, mask_bits_, lhs_);
8432
8433 DigitSum(masm, lhs_, 0x3f, 6, &mod21); // 3 * 21 = b111111.
8434 ModReduce(masm, lhs_, 0x3f, 21);
8435 ModAnswer(masm, result_, shift_distance_, mask_bits_, lhs_);
8436
8437 DigitSum(masm, lhs_, r5, 0xff, 8, 7, &mod23); // 11 * 23 = b11111101.
8438 ModReduce(masm, lhs_, 0xff, 23);
8439 ModAnswer(masm, result_, shift_distance_, mask_bits_, lhs_);
8440
8441 DigitSum(masm, lhs_, r5, 0x7f, 7, 6, &mod25); // 5 * 25 = b1111101.
8442 ModReduce(masm, lhs_, 0x7f, 25);
8443 ModAnswer(masm, result_, shift_distance_, mask_bits_, lhs_);
8444}
8445
8446
Leon Clarkee46be812010-01-19 14:06:41 +00008447const char* GenericBinaryOpStub::GetName() {
8448 if (name_ != NULL) return name_;
8449 const int len = 100;
8450 name_ = Bootstrapper::AllocateAutoDeletedArray(len);
8451 if (name_ == NULL) return "OOM";
8452 const char* op_name = Token::Name(op_);
8453 const char* overwrite_name;
8454 switch (mode_) {
8455 case NO_OVERWRITE: overwrite_name = "Alloc"; break;
8456 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
8457 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
8458 default: overwrite_name = "UnknownOverwrite"; break;
8459 }
8460
8461 OS::SNPrintF(Vector<char>(name_, len),
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01008462 "GenericBinaryOpStub_%s_%s%s_%s",
Leon Clarkee46be812010-01-19 14:06:41 +00008463 op_name,
8464 overwrite_name,
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01008465 specialized_on_rhs_ ? "_ConstantRhs" : "",
8466 BinaryOpIC::GetName(runtime_operands_type_));
Leon Clarkee46be812010-01-19 14:06:41 +00008467 return name_;
8468}
8469
8470
Steve Blocka7e24c12009-10-30 11:49:00 +00008472void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
Steve Block6ded16b2010-05-10 14:33:55 +01008473 // lhs_ : x
8474 // rhs_ : y
8475 // r0 : result
Steve Blocka7e24c12009-10-30 11:49:00 +00008476
Steve Block6ded16b2010-05-10 14:33:55 +01008477 Register result = r0;
8478 Register lhs = lhs_;
8479 Register rhs = rhs_;
8480
8481 // This code can't cope with other register allocations yet.
8482 ASSERT(result.is(r0) &&
8483 ((lhs.is(r0) && rhs.is(r1)) ||
8484 (lhs.is(r1) && rhs.is(r0))));
8485
8486 Register smi_test_reg = VirtualFrame::scratch0();
8487 Register scratch = VirtualFrame::scratch1();
8488
8489 // All ops need to know whether we are dealing with two Smis. Set up
8490 // smi_test_reg to tell us that.
8491 if (ShouldGenerateSmiCode()) {
8492 __ orr(smi_test_reg, lhs, Operand(rhs));
8493 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008494
8495 switch (op_) {
8496 case Token::ADD: {
8497 Label not_smi;
8498 // Fast path.
Steve Block6ded16b2010-05-10 14:33:55 +01008499 if (ShouldGenerateSmiCode()) {
8500 ASSERT(kSmiTag == 0); // Adjust code below.
8501 __ tst(smi_test_reg, Operand(kSmiTagMask));
8502 __ b(ne, &not_smi);
8503 __ add(r0, r1, Operand(r0), SetCC); // Add y optimistically.
8504 // Return if no overflow.
8505 __ Ret(vc);
8506 __ sub(r0, r0, Operand(r1)); // Revert optimistic add.
8507 }
8508 HandleBinaryOpSlowCases(masm, &not_smi, lhs, rhs, Builtins::ADD);
Steve Blocka7e24c12009-10-30 11:49:00 +00008509 break;
8510 }
8511
8512 case Token::SUB: {
8513 Label not_smi;
8514 // Fast path.
Steve Block6ded16b2010-05-10 14:33:55 +01008515 if (ShouldGenerateSmiCode()) {
8516 ASSERT(kSmiTag == 0); // Adjust code below.
8517 __ tst(smi_test_reg, Operand(kSmiTagMask));
8518 __ b(ne, &not_smi);
8519 if (lhs.is(r1)) {
8520 __ sub(r0, r1, Operand(r0), SetCC); // Subtract y optimistically.
8521 // Return if no overflow.
8522 __ Ret(vc);
8523 __ sub(r0, r1, Operand(r0)); // Revert optimistic subtract.
8524 } else {
8525 __ sub(r0, r0, Operand(r1), SetCC); // Subtract y optimistically.
8526 // Return if no overflow.
8527 __ Ret(vc);
8528 __ add(r0, r0, Operand(r1)); // Revert optimistic subtract.
8529 }
8530 }
8531 HandleBinaryOpSlowCases(masm, &not_smi, lhs, rhs, Builtins::SUB);
Steve Blocka7e24c12009-10-30 11:49:00 +00008532 break;
8533 }
8534
8535 case Token::MUL: {
8536 Label not_smi, slow;
Steve Block6ded16b2010-05-10 14:33:55 +01008537 if (ShouldGenerateSmiCode()) {
8538 ASSERT(kSmiTag == 0); // adjust code below
8539 __ tst(smi_test_reg, Operand(kSmiTagMask));
8540 Register scratch2 = smi_test_reg;
8541 smi_test_reg = no_reg;
8542 __ b(ne, &not_smi);
8543 // Remove tag from one operand (but keep sign), so that result is Smi.
8544 __ mov(ip, Operand(rhs, ASR, kSmiTagSize));
8545 // Do multiplication
8546 // scratch = lower 32 bits of ip * lhs.
8547 __ smull(scratch, scratch2, lhs, ip);
8548 // Go slow on overflows (overflow bit is not set).
8549 __ mov(ip, Operand(scratch, ASR, 31));
8550 // No overflow if higher 33 bits are identical.
8551 __ cmp(ip, Operand(scratch2));
8552 __ b(ne, &slow);
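        // (smull gives the full 64-bit product; the 32-bit result is exact
        // iff the high word equals the sign extension of the low word.)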
8553 // Go slow on zero result to handle -0.
8554 __ tst(scratch, Operand(scratch));
8555 __ mov(result, Operand(scratch), LeaveCC, ne);
8556 __ Ret(ne);
8557 // We need -0 if we were multiplying a negative number with 0 to get 0.
8558 // We know one of them was zero.
8559 __ add(scratch2, rhs, Operand(lhs), SetCC);
8560 __ mov(result, Operand(Smi::FromInt(0)), LeaveCC, pl);
8561 __ Ret(pl); // Return Smi 0 if the non-zero one was positive.
8562 // Slow case. We fall through here if we multiplied a negative number
8563 // with 0, because that would mean we should produce -0.
8564 __ bind(&slow);
8565 }
8566 HandleBinaryOpSlowCases(masm, &not_smi, lhs, rhs, Builtins::MUL);
Steve Blocka7e24c12009-10-30 11:49:00 +00008567 break;
8568 }
8569
8570 case Token::DIV:
8571 case Token::MOD: {
8572 Label not_smi;
Steve Block6ded16b2010-05-10 14:33:55 +01008573 if (ShouldGenerateSmiCode() && specialized_on_rhs_) {
Steve Block8defd9f2010-07-08 12:39:36 +01008574 Label lhs_is_unsuitable;
Steve Block6ded16b2010-05-10 14:33:55 +01008575 __ BranchOnNotSmi(lhs, &not_smi);
Steve Blocka7e24c12009-10-30 11:49:00 +00008576 if (IsPowerOf2(constant_rhs_)) {
8577 if (op_ == Token::MOD) {
Steve Block6ded16b2010-05-10 14:33:55 +01008578 __ and_(rhs,
8579 lhs,
Steve Blocka7e24c12009-10-30 11:49:00 +00008580 Operand(0x80000000u | ((constant_rhs_ << kSmiTagSize) - 1)),
8581 SetCC);
8582 // We now have the answer, but if the input was negative we also
8583 // have the sign bit. Our work is done if the result is
8584 // positive or zero:
Steve Block6ded16b2010-05-10 14:33:55 +01008585 if (!rhs.is(r0)) {
8586 __ mov(r0, rhs, LeaveCC, pl);
8587 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008588 __ Ret(pl);
8589 // A mod of a negative left hand side must return a negative number.
8590 // Unfortunately if the answer is 0 then we must return -0. And we
Steve Block6ded16b2010-05-10 14:33:55 +01008591 // already optimistically trashed rhs so we may need to restore it.
8592 __ eor(rhs, rhs, Operand(0x80000000u), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00008593 // Next two instructions are conditional on the answer being -0.
Steve Block6ded16b2010-05-10 14:33:55 +01008594 __ mov(rhs, Operand(Smi::FromInt(constant_rhs_)), LeaveCC, eq);
Steve Block8defd9f2010-07-08 12:39:36 +01008595 __ b(eq, &lhs_is_unsuitable);
Steve Blocka7e24c12009-10-30 11:49:00 +00008596 // We need to subtract the dividend. Eg. -3 % 4 == -3.
Steve Block6ded16b2010-05-10 14:33:55 +01008597 __ sub(result, rhs, Operand(Smi::FromInt(constant_rhs_)));
Steve Blocka7e24c12009-10-30 11:49:00 +00008598 } else {
8599 ASSERT(op_ == Token::DIV);
Steve Block6ded16b2010-05-10 14:33:55 +01008600 __ tst(lhs,
Steve Blocka7e24c12009-10-30 11:49:00 +00008601 Operand(0x80000000u | ((constant_rhs_ << kSmiTagSize) - 1)));
Steve Block8defd9f2010-07-08 12:39:36 +01008602 __ b(ne, &lhs_is_unsuitable); // Go slow on negative or remainder.
Steve Blocka7e24c12009-10-30 11:49:00 +00008603 int shift = 0;
8604 int d = constant_rhs_;
8605 while ((d & 1) == 0) {
8606 d >>= 1;
8607 shift++;
8608 }
Steve Block6ded16b2010-05-10 14:33:55 +01008609 __ mov(r0, Operand(lhs, LSR, shift));
Steve Blocka7e24c12009-10-30 11:49:00 +00008610 __ bic(r0, r0, Operand(kSmiTagMask));
8611 }
8612 } else {
8613 // Not a power of 2.
Steve Block6ded16b2010-05-10 14:33:55 +01008614 __ tst(lhs, Operand(0x80000000u));
Steve Block8defd9f2010-07-08 12:39:36 +01008615 __ b(ne, &lhs_is_unsuitable);
Steve Blocka7e24c12009-10-30 11:49:00 +00008616 // Find a fixed point reciprocal of the divisor so we can divide by
8617 // multiplying.
8618 double divisor = 1.0 / constant_rhs_;
8619 int shift = 32;
8620 double scale = 4294967296.0; // 1 << 32.
8621 uint32_t mul;
8622 // Maximise the precision of the fixed point reciprocal.
8623 while (true) {
8624 mul = static_cast<uint32_t>(scale * divisor);
8625 if (mul >= 0x7fffffff) break;
8626 scale *= 2.0;
8627 shift++;
8628 }
8629 mul++;
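          // E.g. for constant_rhs_ == 5 this settles on shift == 34 and
          // mul == 3435973837, i.e. ceil(2^34 / 5).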
Steve Block6ded16b2010-05-10 14:33:55 +01008630 Register scratch2 = smi_test_reg;
8631 smi_test_reg = no_reg;
8632 __ mov(scratch2, Operand(mul));
8633 __ umull(scratch, scratch2, scratch2, lhs);
8634 __ mov(scratch2, Operand(scratch2, LSR, shift - 31));
8635 // scratch2 is lhs / rhs. scratch2 is not Smi tagged.
8636 // rhs is still the known rhs. rhs is Smi tagged.
8637 // lhs is still the unknown lhs. lhs is Smi tagged.
8638 int required_scratch_shift = 0; // Including the Smi tag shift of 1.
8639 // scratch = scratch2 * rhs.
Steve Blocka7e24c12009-10-30 11:49:00 +00008640 MultiplyByKnownInt2(masm,
Steve Block6ded16b2010-05-10 14:33:55 +01008641 scratch,
8642 scratch2,
8643 rhs,
Steve Blocka7e24c12009-10-30 11:49:00 +00008644 constant_rhs_,
Steve Block6ded16b2010-05-10 14:33:55 +01008645 &required_scratch_shift);
8646 // scratch << required_scratch_shift is now the Smi tagged rhs *
8647 // (lhs / rhs) where / indicates integer division.
Steve Blocka7e24c12009-10-30 11:49:00 +00008648 if (op_ == Token::DIV) {
Steve Block6ded16b2010-05-10 14:33:55 +01008649 __ cmp(lhs, Operand(scratch, LSL, required_scratch_shift));
Steve Block8defd9f2010-07-08 12:39:36 +01008650 __ b(ne, &lhs_is_unsuitable); // There was a remainder.
Steve Block6ded16b2010-05-10 14:33:55 +01008651 __ mov(result, Operand(scratch2, LSL, kSmiTagSize));
Steve Blocka7e24c12009-10-30 11:49:00 +00008652 } else {
8653 ASSERT(op_ == Token::MOD);
Steve Block6ded16b2010-05-10 14:33:55 +01008654 __ sub(result, lhs, Operand(scratch, LSL, required_scratch_shift));
Steve Blocka7e24c12009-10-30 11:49:00 +00008655 }
8656 }
8657 __ Ret();
Steve Block8defd9f2010-07-08 12:39:36 +01008658 __ bind(&lhs_is_unsuitable);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01008659 } else if (op_ == Token::MOD &&
8660 runtime_operands_type_ != BinaryOpIC::HEAP_NUMBERS &&
8661 runtime_operands_type_ != BinaryOpIC::STRINGS) {
8662 // Do generate a bit of smi code for modulus even though the default for
8663 // modulus is not to do it, but since the ARM processor has no coprocessor
Steve Block8defd9f2010-07-08 12:39:36 +01008664 // support for modulus, checking for smis makes sense. We can handle
8665 // 1 to 25 times any power of 2. This covers over half the numbers from
8666 // 1 to 100 including all of the first 25. (Actually the constants < 10
8667 // are handled above by reciprocal multiplication. We only get here for
8668 // those cases if the right hand side is not a constant or for cases
8669 // like 192 which is 3*2^6 and ends up in the 3 case in the integer mod
8670 // stub.)
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01008671 Label slow;
Steve Block8defd9f2010-07-08 12:39:36 +01008672 Label not_power_of_2;
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01008673 ASSERT(!ShouldGenerateSmiCode());
8674 ASSERT(kSmiTag == 0); // Adjust code below.
8675 // Check for two positive smis.
8676 __ orr(smi_test_reg, lhs, Operand(rhs));
8677 __ tst(smi_test_reg, Operand(0x80000000u | kSmiTagMask));
8678 __ b(ne, &slow);
8679 // Check that rhs is a power of two and not zero.
Steve Block8defd9f2010-07-08 12:39:36 +01008680 Register mask_bits = r3;
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01008681 __ sub(scratch, rhs, Operand(1), SetCC);
8682 __ b(mi, &slow);
Steve Block8defd9f2010-07-08 12:39:36 +01008683 __ and_(mask_bits, rhs, Operand(scratch), SetCC);
8684 __ b(ne, &not_power_of_2);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01008685 // Calculate power of two modulus.
8686 __ and_(result, lhs, Operand(scratch));
8687 __ Ret();
Steve Block8defd9f2010-07-08 12:39:36 +01008688
8689 __ bind(&not_power_of_2);
8690 __ eor(scratch, scratch, Operand(mask_bits));
8691 // At least two bits are set in the modulus. The high one(s) are in
8692 // mask_bits and the low one is scratch + 1.
8693 __ and_(mask_bits, scratch, Operand(lhs));
8694 Register shift_distance = scratch;
8695 scratch = no_reg;
8696
8697 // The rhs consists of a power of 2 multiplied by some odd number.
8698 // The power-of-2 part we handle by putting the corresponding bits
8699 // from the lhs in the mask_bits register, and the power in the
8700 // shift_distance register. Shift distance is never 0 due to Smi
8701 // tagging.
8702 __ CountLeadingZeros(r4, shift_distance, shift_distance);
8703 __ rsb(shift_distance, r4, Operand(32));
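        // shift_distance now holds k, where 2^k is the power-of-2 factor of
        // the rhs.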
8704
8705 // Now we need to find out what the odd number is. The last bit is
8706 // always 1.
8707 Register odd_number = r4;
8708 __ mov(odd_number, Operand(rhs, LSR, shift_distance));
8709 __ cmp(odd_number, Operand(25));
8710 __ b(gt, &slow);
8711
8712 IntegerModStub stub(
8713 result, shift_distance, odd_number, mask_bits, lhs, r5);
8714 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); // Tail call.
8715
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01008716 __ bind(&slow);
Steve Blocka7e24c12009-10-30 11:49:00 +00008717 }
Steve Block6ded16b2010-05-10 14:33:55 +01008718 HandleBinaryOpSlowCases(
8719 masm,
8720 &not_smi,
8721 lhs,
8722 rhs,
8723 op_ == Token::MOD ? Builtins::MOD : Builtins::DIV);
Steve Blocka7e24c12009-10-30 11:49:00 +00008724 break;
8725 }
8726
8727 case Token::BIT_OR:
8728 case Token::BIT_AND:
8729 case Token::BIT_XOR:
8730 case Token::SAR:
8731 case Token::SHR:
8732 case Token::SHL: {
8733 Label slow;
8734 ASSERT(kSmiTag == 0); // adjust code below
Steve Block6ded16b2010-05-10 14:33:55 +01008735 __ tst(smi_test_reg, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00008736 __ b(ne, &slow);
Steve Block6ded16b2010-05-10 14:33:55 +01008737 Register scratch2 = smi_test_reg;
8738 smi_test_reg = no_reg;
Steve Blocka7e24c12009-10-30 11:49:00 +00008739 switch (op_) {
Steve Block6ded16b2010-05-10 14:33:55 +01008740 case Token::BIT_OR: __ orr(result, rhs, Operand(lhs)); break;
8741 case Token::BIT_AND: __ and_(result, rhs, Operand(lhs)); break;
8742 case Token::BIT_XOR: __ eor(result, rhs, Operand(lhs)); break;
Steve Blocka7e24c12009-10-30 11:49:00 +00008743 case Token::SAR:
8744 // Remove tags from right operand.
Steve Block6ded16b2010-05-10 14:33:55 +01008745 __ GetLeastBitsFromSmi(scratch2, rhs, 5);
8746 __ mov(result, Operand(lhs, ASR, scratch2));
Steve Blocka7e24c12009-10-30 11:49:00 +00008747 // Smi tag result.
Steve Block6ded16b2010-05-10 14:33:55 +01008748 __ bic(result, result, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00008749 break;
8750 case Token::SHR:
8751 // Remove tags from operands. We can't do this on a 31 bit number
8752 // because then the 0s get shifted into bit 30 instead of bit 31.
Steve Block6ded16b2010-05-10 14:33:55 +01008753 __ mov(scratch, Operand(lhs, ASR, kSmiTagSize)); // x
8754 __ GetLeastBitsFromSmi(scratch2, rhs, 5);
8755 __ mov(scratch, Operand(scratch, LSR, scratch2));
Steve Blocka7e24c12009-10-30 11:49:00 +00008756 // Unsigned shift is not allowed to produce a negative number, so
8757 // check the sign bit and the sign bit after Smi tagging.
Steve Block6ded16b2010-05-10 14:33:55 +01008758 __ tst(scratch, Operand(0xc0000000));
Steve Blocka7e24c12009-10-30 11:49:00 +00008759 __ b(ne, &slow);
8760 // Smi tag result.
Steve Block6ded16b2010-05-10 14:33:55 +01008761 __ mov(result, Operand(scratch, LSL, kSmiTagSize));
Steve Blocka7e24c12009-10-30 11:49:00 +00008762 break;
8763 case Token::SHL:
8764 // Remove tags from operands.
Steve Block6ded16b2010-05-10 14:33:55 +01008765 __ mov(scratch, Operand(lhs, ASR, kSmiTagSize)); // x
8766 __ GetLeastBitsFromSmi(scratch2, rhs, 5);
8767 __ mov(scratch, Operand(scratch, LSL, scratch2));
Steve Blocka7e24c12009-10-30 11:49:00 +00008768 // Check that the signed result fits in a Smi.
Steve Block6ded16b2010-05-10 14:33:55 +01008769 __ add(scratch2, scratch, Operand(0x40000000), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00008770 __ b(mi, &slow);
Steve Block6ded16b2010-05-10 14:33:55 +01008771 __ mov(result, Operand(scratch, LSL, kSmiTagSize));
Steve Blocka7e24c12009-10-30 11:49:00 +00008772 break;
8773 default: UNREACHABLE();
8774 }
8775 __ Ret();
8776 __ bind(&slow);
Steve Block6ded16b2010-05-10 14:33:55 +01008777 HandleNonSmiBitwiseOp(masm, lhs, rhs);
Steve Blocka7e24c12009-10-30 11:49:00 +00008778 break;
8779 }
8780
8781 default: UNREACHABLE();
8782 }
8783 // This code should be unreachable.
8784 __ stop("Unreachable");
Steve Block6ded16b2010-05-10 14:33:55 +01008785
8786 // Generate an unreachable reference to the DEFAULT stub so that it can be
8787 // found at the end of this stub when clearing ICs at GC.
8788 // TODO(kaznacheev): Check performance impact and get rid of this.
8789 if (runtime_operands_type_ != BinaryOpIC::DEFAULT) {
8790 GenericBinaryOpStub uninit(MinorKey(), BinaryOpIC::DEFAULT);
8791 __ CallStub(&uninit);
8792 }
8793}
8794
8795
8796void GenericBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
8797 Label get_result;
8798
8799 __ Push(r1, r0);
8800
Leon Clarkeac952652010-07-15 11:15:24 +01008801 __ mov(r2, Operand(Smi::FromInt(MinorKey())));
8802 __ mov(r1, Operand(Smi::FromInt(op_)));
Steve Block6ded16b2010-05-10 14:33:55 +01008803 __ mov(r0, Operand(Smi::FromInt(runtime_operands_type_)));
Leon Clarkeac952652010-07-15 11:15:24 +01008804 __ Push(r2, r1, r0);
Steve Block6ded16b2010-05-10 14:33:55 +01008805
8806 __ TailCallExternalReference(
8807 ExternalReference(IC_Utility(IC::kBinaryOp_Patch)),
Leon Clarkeac952652010-07-15 11:15:24 +01008808 5,
Steve Block6ded16b2010-05-10 14:33:55 +01008809 1);
Steve Block6ded16b2010-05-10 14:33:55 +01008810}
8811
8812
8813Handle<Code> GetBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info) {
8814 GenericBinaryOpStub stub(key, type_info);
8815 return stub.GetCode();
Steve Blocka7e24c12009-10-30 11:49:00 +00008816}
8817
8818
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01008819void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
8820 // Argument is a number and is on the stack and in r0.
8821 Label runtime_call;
8822 Label input_not_smi;
8823 Label loaded;
8824
8825 if (CpuFeatures::IsSupported(VFP3)) {
8826 // Load argument and check if it is a smi.
8827 __ BranchOnNotSmi(r0, &input_not_smi);
8828
8829 CpuFeatures::Scope scope(VFP3);
8830 // Input is a smi. Convert to double and load the low and high words
8831 // of the double into r2, r3.
8832 __ IntegerToDoubleConversionWithVFP3(r0, r3, r2);
8833 __ b(&loaded);
8834
8835 __ bind(&input_not_smi);
8836 // Check if input is a HeapNumber.
8837 __ CheckMap(r0,
8838 r1,
8839 Heap::kHeapNumberMapRootIndex,
8840 &runtime_call,
8841 true);
8842 // Input is a HeapNumber. Load it to a double register and store the
8843 // low and high words into r2, r3.
8844 __ Ldrd(r2, r3, FieldMemOperand(r0, HeapNumber::kValueOffset));
8845
8846 __ bind(&loaded);
8847 // r2 = low 32 bits of double value
8848 // r3 = high 32 bits of double value
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01008849 // Compute hash (the shifts are arithmetic):
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01008850 // h = (low ^ high); h ^= h >> 16; h ^= h >> 8; h = h & (cacheSize - 1);
8851 __ eor(r1, r2, Operand(r3));
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01008852 __ eor(r1, r1, Operand(r1, ASR, 16));
8853 __ eor(r1, r1, Operand(r1, ASR, 8));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01008854 ASSERT(IsPowerOf2(TranscendentalCache::kCacheSize));
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01008855 __ And(r1, r1, Operand(TranscendentalCache::kCacheSize - 1));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01008856
8857 // r2 = low 32 bits of double value.
8858 // r3 = high 32 bits of double value.
8859 // r1 = TranscendentalCache::hash(double value).
8860 __ mov(r0,
8861 Operand(ExternalReference::transcendental_cache_array_address()));
8862 // r0 points to cache array.
8863 __ ldr(r0, MemOperand(r0, type_ * sizeof(TranscendentalCache::caches_[0])));
8864 // r0 points to the cache for the type type_.
8865 // If NULL, the cache hasn't been initialized yet, so go through runtime.
8866 __ cmp(r0, Operand(0));
8867 __ b(eq, &runtime_call);
8868
8869#ifdef DEBUG
8870 // Check that the layout of cache elements match expectations.
8871 { TranscendentalCache::Element test_elem[2];
8872 char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
8873 char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
8874 char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
8875 char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
8876 char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
8877 CHECK_EQ(12, elem2_start - elem_start); // Two uint32_t's and a pointer.
8878 CHECK_EQ(0, elem_in0 - elem_start);
8879 CHECK_EQ(kIntSize, elem_in1 - elem_start);
8880 CHECK_EQ(2 * kIntSize, elem_out - elem_start);
8881 }
8882#endif
8883
8884 // Find the address of entry r1 in the cache, i.e., &r0[r1 * 12].
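    // (r1 * 12 is computed as r1 = r1 * 3 in the first add, then scaled by 4
    // in the addressing of the second.)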
8885 __ add(r1, r1, Operand(r1, LSL, 1));
8886 __ add(r0, r0, Operand(r1, LSL, 2));
8887 // Check if cache matches: Double value is stored in uint32_t[2] array.
8888 __ ldm(ia, r0, r4.bit()| r5.bit() | r6.bit());
8889 __ cmp(r2, r4);
8890 __ b(ne, &runtime_call);
8891 __ cmp(r3, r5);
8892 __ b(ne, &runtime_call);
8893 // Cache hit. Load result, pop argument and return.
8894 __ mov(r0, Operand(r6));
8895 __ pop();
8896 __ Ret();
8897 }
8898
8899 __ bind(&runtime_call);
8900 __ TailCallExternalReference(ExternalReference(RuntimeFunction()), 1, 1);
8901}
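
// A rough C++ sketch of the cache probe generated above (illustrative only;
// `elements`, `hash`, `lo` and `hi` stand in for the values the generated
// code keeps in r0-r3):
//
//   TranscendentalCache::Element* entry = &elements[hash];  // 12 bytes each.
//   if (entry->in[0] == lo && entry->in[1] == hi) {
//     return entry->output;  // Cache hit: the previously computed result.
//   }
//   // Otherwise: fall through to the runtime call.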


Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
  switch (type_) {
    // Add more cases when necessary.
    case TranscendentalCache::SIN: return Runtime::kMath_sin;
    case TranscendentalCache::COS: return Runtime::kMath_cos;
    default:
      UNIMPLEMENTED();
      return Runtime::kAbort;
  }
}


void StackCheckStub::Generate(MacroAssembler* masm) {
  // Do tail-call to runtime routine. Runtime routines expect at least one
  // argument, so give it a Smi.
  __ mov(r0, Operand(Smi::FromInt(0)));
  __ push(r0);
  __ TailCallRuntime(Runtime::kStackGuard, 1, 1);

  __ StubReturn(1);
}


void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
  Label slow, done;

  Register heap_number_map = r6;
  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);

  if (op_ == Token::SUB) {
    // Check whether the value is a smi.
    Label try_float;
    __ tst(r0, Operand(kSmiTagMask));
    __ b(ne, &try_float);

    // Go to the slow case if the value of the expression is zero, so that
    // we distinguish between 0 and -0.
    if (negative_zero_ == kStrictNegativeZero) {
      // If we have to check for zero, then we can check for the max negative
      // smi while we are at it.
      __ bic(ip, r0, Operand(0x80000000), SetCC);
      __ b(eq, &slow);
      __ rsb(r0, r0, Operand(0));
      __ StubReturn(1);
    } else {
      // The value of the expression is a smi and 0 is OK for -0. Try
      // optimistic subtraction '0 - value'.
      __ rsb(r0, r0, Operand(0), SetCC);
      __ StubReturn(1, vc);
      // We don't have to reverse the optimistic neg since the only case
      // where we fall through is the minimum negative Smi, for which the
      // neg leaves the register unchanged.
      __ jmp(&slow);  // Go slow on max negative Smi.
    }

    __ bind(&try_float);
    __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
    __ cmp(r1, heap_number_map);
    __ b(ne, &slow);
    // r0 is a heap number. Get a new heap number in r1.
    if (overwrite_ == UNARY_OVERWRITE) {
      __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
      __ eor(r2, r2, Operand(HeapNumber::kSignMask));  // Flip sign.
      __ str(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
    } else {
      __ AllocateHeapNumber(r1, r2, r3, r6, &slow);
      __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
      __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
      __ str(r3, FieldMemOperand(r1, HeapNumber::kMantissaOffset));
      __ eor(r2, r2, Operand(HeapNumber::kSignMask));  // Flip sign.
      __ str(r2, FieldMemOperand(r1, HeapNumber::kExponentOffset));
      __ mov(r0, Operand(r1));
    }
  } else if (op_ == Token::BIT_NOT) {
    // Check if the operand is a heap number.
    __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
    __ cmp(r1, heap_number_map);
    __ b(ne, &slow);

    // Convert the heap number in r0 to an untagged integer in r1.
    GetInt32(masm, r0, r1, r2, r3, &slow);

    // Do the bitwise operation (move negated) and check if the result
    // fits in a smi.
    Label try_float;
    __ mvn(r1, Operand(r1));
    __ add(r2, r1, Operand(0x40000000), SetCC);
    __ b(mi, &try_float);
    __ mov(r0, Operand(r1, LSL, kSmiTagSize));
    __ b(&done);

    __ bind(&try_float);
    if (overwrite_ != UNARY_OVERWRITE) {
      // Allocate a fresh heap number, but don't overwrite r0 until
      // we're sure we can do it without going through the slow case
      // that needs the value in r0.
      __ AllocateHeapNumber(r2, r3, r4, r6, &slow);
      __ mov(r0, Operand(r2));
    }

    if (CpuFeatures::IsSupported(VFP3)) {
      // Convert the int32 in r1 to the heap number in r0. r2 is corrupted.
      CpuFeatures::Scope scope(VFP3);
      __ vmov(s0, r1);
      __ vcvt_f64_s32(d0, s0);
      __ sub(r2, r0, Operand(kHeapObjectTag));
      __ vstr(d0, r2, HeapNumber::kValueOffset);
    } else {
      // WriteInt32ToHeapNumberStub does not trigger GC, so we do not
      // have to set up a frame.
      WriteInt32ToHeapNumberStub stub(r1, r0, r2);
      __ push(lr);
      __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
      __ pop(lr);
    }
  } else {
    UNIMPLEMENTED();
  }

  __ bind(&done);
  __ StubReturn(1);

  // Handle the slow case by jumping to the JavaScript builtin.
  __ bind(&slow);
  __ push(r0);
  switch (op_) {
    case Token::SUB:
      __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_JS);
      break;
    case Token::BIT_NOT:
      __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_JS);
      break;
    default:
      UNREACHABLE();
  }
}
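
// Why the optimistic neg in the SUB path above can fall through only for
// the minimum Smi: with a one-bit smi tag, the tagged value of the smallest
// smi (-2^30) is 0x80000000, and `rsb r0, r0, #0` on that value overflows
// (V flag set) while leaving the register unchanged. A sketch in plain C++
// (illustrative names):
//
//   int32_t tagged = ...;                    // smi-tagged operand
//   int32_t negated = 0 - tagged;            // rsb r0, r0, #0
//   if (tagged != kMinInt) return negated;   // StubReturn(1, vc)
//   // else: fall through to the slow path with r0 still holding the input.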


void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
  // r0 holds the exception.

  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);

  // Drop the sp to the top of the handler.
  __ mov(r3, Operand(ExternalReference(Top::k_handler_address)));
  __ ldr(sp, MemOperand(r3));

  // Restore the next handler and frame pointer, discard handler state.
  ASSERT(StackHandlerConstants::kNextOffset == 0);
  __ pop(r2);
  __ str(r2, MemOperand(r3));
  ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
  __ ldm(ia_w, sp, r3.bit() | fp.bit());  // r3: discarded state.

  // Before returning we restore the context from the frame pointer if
  // not NULL. The frame pointer is NULL in the exception handler of a
  // JS entry frame.
  __ cmp(fp, Operand(0));
  // Set cp to NULL if fp is NULL.
  __ mov(cp, Operand(0), LeaveCC, eq);
  // Restore cp otherwise.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);
#ifdef DEBUG
  if (FLAG_debug_code) {
    __ mov(lr, Operand(pc));
  }
#endif
  ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
  __ pop(pc);
}
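
// Stack handler layout assumed by GenerateThrowTOS above and
// GenerateThrowUncatchable below, as pinned down by the ASSERTs against
// StackHandlerConstants (offsets in words from sp):
//   sp + 0: next handler
//   sp + 1: handler state
//   sp + 2: saved fp
//   sp + 3: saved pc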


void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
                                          UncatchableExceptionType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);

  // Drop sp to the top stack handler.
  __ mov(r3, Operand(ExternalReference(Top::k_handler_address)));
  __ ldr(sp, MemOperand(r3));

  // Unwind the handlers until the ENTRY handler is found.
  Label loop, done;
  __ bind(&loop);
  // Load the type of the current stack handler.
  const int kStateOffset = StackHandlerConstants::kStateOffset;
  __ ldr(r2, MemOperand(sp, kStateOffset));
  __ cmp(r2, Operand(StackHandler::ENTRY));
  __ b(eq, &done);
  // Fetch the next handler in the list.
  const int kNextOffset = StackHandlerConstants::kNextOffset;
  __ ldr(sp, MemOperand(sp, kNextOffset));
  __ jmp(&loop);
  __ bind(&done);

  // Set the top handler address to the next handler past the current ENTRY
  // handler.
  ASSERT(StackHandlerConstants::kNextOffset == 0);
  __ pop(r2);
  __ str(r2, MemOperand(r3));

  if (type == OUT_OF_MEMORY) {
    // Set external caught exception to false.
    ExternalReference external_caught(
        Top::k_external_caught_exception_address);
    __ mov(r0, Operand(false));
    __ mov(r2, Operand(external_caught));
    __ str(r0, MemOperand(r2));

    // Set pending exception and r0 to out of memory exception.
    Failure* out_of_memory = Failure::OutOfMemoryException();
    __ mov(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
    __ mov(r2, Operand(ExternalReference(Top::k_pending_exception_address)));
    __ str(r0, MemOperand(r2));
  }

  // Stack layout at this point. See also StackHandlerConstants.
  // sp -> state (ENTRY)
  //       fp
  //       lr

  // Discard handler state (r2 is not used) and restore frame pointer.
  ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
  __ ldm(ia_w, sp, r2.bit() | fp.bit());  // r2: discarded state.
  // Before returning we restore the context from the frame pointer if
  // not NULL. The frame pointer is NULL in the exception handler of a
  // JS entry frame.
  __ cmp(fp, Operand(0));
  // Set cp to NULL if fp is NULL.
  __ mov(cp, Operand(0), LeaveCC, eq);
  // Restore cp otherwise.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);
#ifdef DEBUG
  if (FLAG_debug_code) {
    __ mov(lr, Operand(pc));
  }
#endif
  ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
  __ pop(pc);
}


void CEntryStub::GenerateCore(MacroAssembler* masm,
                              Label* throw_normal_exception,
                              Label* throw_termination_exception,
                              Label* throw_out_of_memory_exception,
                              bool do_gc,
                              bool always_allocate,
                              int frame_alignment_skew) {
  // r0: result parameter for PerformGC, if any
  // r4: number of arguments including receiver (C callee-saved)
  // r5: pointer to builtin function (C callee-saved)
  // r6: pointer to the first argument (C callee-saved)

  if (do_gc) {
    // Passing r0.
    __ PrepareCallCFunction(1, r1);
    __ CallCFunction(ExternalReference::perform_gc_function(), 1);
  }

  ExternalReference scope_depth =
      ExternalReference::heap_always_allocate_scope_depth();
  if (always_allocate) {
    __ mov(r0, Operand(scope_depth));
    __ ldr(r1, MemOperand(r0));
    __ add(r1, r1, Operand(1));
    __ str(r1, MemOperand(r0));
  }

  // Call C built-in.
  // r0 = argc, r1 = argv
  __ mov(r0, Operand(r4));
  __ mov(r1, Operand(r6));

  int frame_alignment = MacroAssembler::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
#if defined(V8_HOST_ARCH_ARM)
  if (FLAG_debug_code) {
    if (frame_alignment > kPointerSize) {
      Label alignment_as_expected;
      ASSERT(IsPowerOf2(frame_alignment));
      __ sub(r2, sp, Operand(frame_alignment_skew));
      __ tst(r2, Operand(frame_alignment_mask));
      __ b(eq, &alignment_as_expected);
      // Don't use Check here, as it will call Runtime_Abort re-entering here.
      __ stop("Unexpected alignment");
      __ bind(&alignment_as_expected);
    }
  }
#endif

  // Just before the call (jump) below, lr is pushed, so the actual alignment
  // skew is the current skew plus one pointer size.
  int alignment_before_call =
      (frame_alignment_skew + kPointerSize) & frame_alignment_mask;
  if (alignment_before_call > 0) {
    // Push until the alignment before the call is met.
    __ mov(r2, Operand(0));
    for (int i = alignment_before_call;
         (i & frame_alignment_mask) != 0;
         i += kPointerSize) {
      __ push(r2);
    }
  }

  // TODO(1242173): To let the GC traverse the return address of the exit
  // frames, we need to know where the return address is. Right now,
  // we push it on the stack to be able to find it again, but we never
  // restore from it in case of changes, which makes it impossible to
  // support moving the C entry code stub. This should be fixed, but currently
  // this is OK because the CEntryStub gets generated so early in the V8 boot
  // sequence that it is never moved.
  masm->add(lr, pc, Operand(4));  // Compute return address: (pc + 8) + 4.
  masm->push(lr);
  masm->Jump(r5);

  // Restore sp back to before aligning the stack.
  if (alignment_before_call > 0) {
    __ add(sp, sp, Operand(alignment_before_call));
  }

  if (always_allocate) {
    // It's okay to clobber r2 and r3 here. Don't mess with r0 and r1
    // though (they contain the result).
    __ mov(r2, Operand(scope_depth));
    __ ldr(r3, MemOperand(r2));
    __ sub(r3, r3, Operand(1));
    __ str(r3, MemOperand(r2));
  }

  // Check for failure result.
  Label failure_returned;
  ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
  // Lower 2 bits of r2 are 0 iff r0 has failure tag.
  __ add(r2, r0, Operand(1));
  __ tst(r2, Operand(kFailureTagMask));
  __ b(eq, &failure_returned);

  // Exit C frame and return.
  // r0:r1: result
  // sp: stack pointer
  // fp: frame pointer
  __ LeaveExitFrame(mode_);

  // Check if we should retry or throw an exception.
  Label retry;
  __ bind(&failure_returned);
  ASSERT(Failure::RETRY_AFTER_GC == 0);
  __ tst(r0, Operand(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
  __ b(eq, &retry);

  // Special handling of out of memory exceptions.
  Failure* out_of_memory = Failure::OutOfMemoryException();
  __ cmp(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
  __ b(eq, throw_out_of_memory_exception);

  // Retrieve the pending exception and clear the variable.
  __ mov(ip, Operand(ExternalReference::the_hole_value_location()));
  __ ldr(r3, MemOperand(ip));
  __ mov(ip, Operand(ExternalReference(Top::k_pending_exception_address)));
  __ ldr(r0, MemOperand(ip));
  __ str(r3, MemOperand(ip));

  // Special handling of termination exceptions, which are uncatchable
  // by JavaScript code.
  __ cmp(r0, Operand(Factory::termination_exception()));
  __ b(eq, throw_termination_exception);

  // Handle normal exception.
  __ jmp(throw_normal_exception);

  __ bind(&retry);  // Pass last failure (r0) as parameter (r0) when retrying.
}
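
// The failure check in GenerateCore relies on the failure tag occupying the
// low bits of a returned value with all mask bits set (that is what the
// ASSERT about kFailureTag + 1 encodes): adding 1 carries out of the tag,
// so the masked result is zero exactly for failure objects. A sketch:
//
//   bool is_failure = (((value + 1) & kFailureTagMask) == 0);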


void CEntryStub::Generate(MacroAssembler* masm) {
  // Called from JavaScript; parameters are on stack as if calling JS function
  // r0: number of arguments including receiver
  // r1: pointer to builtin function
  // fp: frame pointer (restored after C call)
  // sp: stack pointer (restored as callee's sp after C call)
  // cp: current context (C callee-saved)

  // Result returned in r0 or r0+r1 by default.

  // NOTE: Invocations of builtins may return failure objects
  // instead of a proper result. The builtin entry handles
  // this by performing a garbage collection and retrying the
  // builtin once.

  // Enter the exit frame that transitions from JavaScript to C++.
  __ EnterExitFrame(mode_);

  // r4: number of arguments (C callee-saved)
  // r5: pointer to builtin function (C callee-saved)
  // r6: pointer to first argument (C callee-saved)

  Label throw_normal_exception;
  Label throw_termination_exception;
  Label throw_out_of_memory_exception;

  // Call into the runtime system.
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,
               false,
               false,
               -kPointerSize);

  // Do space-specific GC and retry runtime call.
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,
               true,
               false,
               0);

  // Do full GC and retry runtime call one final time.
  Failure* failure = Failure::InternalError();
  __ mov(r0, Operand(reinterpret_cast<int32_t>(failure)));
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,
               true,
               true,
               kPointerSize);

  __ bind(&throw_out_of_memory_exception);
  GenerateThrowUncatchable(masm, OUT_OF_MEMORY);

  __ bind(&throw_termination_exception);
  GenerateThrowUncatchable(masm, TERMINATION);

  __ bind(&throw_normal_exception);
  GenerateThrowTOS(masm);
}


void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc
  // [sp+0]: argv

  Label invoke, exit;

  // Called from C, so do not pop argc and args on exit (preserve sp).
  // No need to save register-passed args.
  // Save callee-saved registers (incl. cp and fp), sp, and lr.
  __ stm(db_w, sp, kCalleeSaved | lr.bit());

  // Get address of argv, see stm above.
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc
  __ ldr(r4, MemOperand(sp, (kNumCalleeSaved + 1) * kPointerSize));  // argv

  // Push a frame with special values setup to mark it as an entry frame.
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc
  // r4: argv
  __ mov(r8, Operand(-1));  // Push a bad frame pointer to fail if it is used.
  int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
  __ mov(r7, Operand(Smi::FromInt(marker)));
  __ mov(r6, Operand(Smi::FromInt(marker)));
  __ mov(r5, Operand(ExternalReference(Top::k_c_entry_fp_address)));
  __ ldr(r5, MemOperand(r5));
  __ Push(r8, r7, r6, r5);

  // Setup frame pointer for the frame to be pushed.
  __ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));

  // Call a faked try-block that does the invoke.
  __ bl(&invoke);

  // Caught exception: Store result (exception) in the pending
  // exception field in the JSEnv and return a failure sentinel.
  // Coming in here the fp will be invalid because the PushTryHandler below
  // sets it to 0 to signal the existence of the JSEntry frame.
  __ mov(ip, Operand(ExternalReference(Top::k_pending_exception_address)));
  __ str(r0, MemOperand(ip));
  __ mov(r0, Operand(reinterpret_cast<int32_t>(Failure::Exception())));
  __ b(&exit);

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  // Must preserve r0-r4; r5-r7 are available.
  __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
  // If an exception not caught by another handler occurs, this handler
  // returns control to the code after the bl(&invoke) above, which
  // restores all kCalleeSaved registers (including cp and fp) to their
  // saved values before returning a failure to C.

  // Clear any pending exceptions.
  __ mov(ip, Operand(ExternalReference::the_hole_value_location()));
  __ ldr(r5, MemOperand(ip));
  __ mov(ip, Operand(ExternalReference(Top::k_pending_exception_address)));
  __ str(r5, MemOperand(ip));

  // Invoke the function by calling through the JS entry trampoline builtin.
  // Notice that we cannot store a reference to the trampoline code directly
  // in this stub, because runtime stubs are not traversed when doing GC.

  // Expected registers by Builtins::JSEntryTrampoline
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc
  // r4: argv
  if (is_construct) {
    ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline);
    __ mov(ip, Operand(construct_entry));
  } else {
    ExternalReference entry(Builtins::JSEntryTrampoline);
    __ mov(ip, Operand(entry));
  }
  __ ldr(ip, MemOperand(ip));  // Deref address.

  // Branch and link to JSEntryTrampoline. We don't use the double underscore
  // macro for the add instruction because we don't want the coverage tool
  // inserting instructions here after we read the pc.
  __ mov(lr, Operand(pc));
  masm->add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));

  // Unlink this frame from the handler chain. When reading the
  // address of the next handler, there is no need to use the address
  // displacement since the current stack pointer (sp) points directly
  // to the stack handler.
  __ ldr(r3, MemOperand(sp, StackHandlerConstants::kNextOffset));
  __ mov(ip, Operand(ExternalReference(Top::k_handler_address)));
  __ str(r3, MemOperand(ip));
  // No need to restore registers.
  __ add(sp, sp, Operand(StackHandlerConstants::kSize));

  __ bind(&exit);  // r0 holds result.
  // Restore the top frame descriptors from the stack.
  __ pop(r3);
  __ mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
  __ str(r3, MemOperand(ip));

  // Reset the stack to the callee saved registers.
  __ add(sp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));

  // Restore callee-saved registers and return.
#ifdef DEBUG
  if (FLAG_debug_code) {
    __ mov(lr, Operand(pc));
  }
#endif
  __ ldm(ia_w, sp, kCalleeSaved | pc.bit());
}


// This stub performs an instanceof, calling the builtin function if
// necessary. Uses r0 for the object and r1 for the function that the
// object may be an instance of (both are fetched from the stack).
void InstanceofStub::Generate(MacroAssembler* masm) {
  // Get the object - slow case for smis (we may need to throw an exception
  // depending on the rhs).
  Label slow, loop, is_instance, is_not_instance;
  __ ldr(r0, MemOperand(sp, 1 * kPointerSize));
  __ BranchOnSmi(r0, &slow);

  // Check that the left hand side is a JS object and put its map in r3.
  __ CompareObjectType(r0, r3, r2, FIRST_JS_OBJECT_TYPE);
  __ b(lt, &slow);
  __ cmp(r2, Operand(LAST_JS_OBJECT_TYPE));
  __ b(gt, &slow);

  // Get the prototype of the function (r4 is result, r2 is scratch).
  __ ldr(r1, MemOperand(sp, 0));
  // r1 is function, r3 is map.

  // Look up the function and the map in the instanceof cache.
  Label miss;
  __ LoadRoot(ip, Heap::kInstanceofCacheFunctionRootIndex);
  __ cmp(r1, ip);
  __ b(ne, &miss);
  __ LoadRoot(ip, Heap::kInstanceofCacheMapRootIndex);
  __ cmp(r3, ip);
  __ b(ne, &miss);
  __ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
  __ pop();
  __ pop();
  __ mov(pc, Operand(lr));

  __ bind(&miss);
  __ TryGetFunctionPrototype(r1, r4, r2, &slow);

  // Check that the function prototype is a JS object.
  __ BranchOnSmi(r4, &slow);
  __ CompareObjectType(r4, r5, r5, FIRST_JS_OBJECT_TYPE);
  __ b(lt, &slow);
  __ cmp(r5, Operand(LAST_JS_OBJECT_TYPE));
  __ b(gt, &slow);

  __ StoreRoot(r1, Heap::kInstanceofCacheFunctionRootIndex);
  __ StoreRoot(r3, Heap::kInstanceofCacheMapRootIndex);

  // Register mapping: r3 is object map and r4 is function prototype.
  // Get prototype of object into r2.
  __ ldr(r2, FieldMemOperand(r3, Map::kPrototypeOffset));

  // Loop through the prototype chain looking for the function prototype.
  __ bind(&loop);
  __ cmp(r2, Operand(r4));
  __ b(eq, &is_instance);
  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(r2, ip);
  __ b(eq, &is_not_instance);
  __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
  __ ldr(r2, FieldMemOperand(r2, Map::kPrototypeOffset));
  __ jmp(&loop);

  __ bind(&is_instance);
  __ mov(r0, Operand(Smi::FromInt(0)));
  __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
  __ pop();
  __ pop();
  __ mov(pc, Operand(lr));  // Return.

  __ bind(&is_not_instance);
  __ mov(r0, Operand(Smi::FromInt(1)));
  __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
  __ pop();
  __ pop();
  __ mov(pc, Operand(lr));  // Return.

  // Slow-case. Tail call builtin.
  __ bind(&slow);
  __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_JS);
}
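
// The prototype-chain loop above corresponds roughly to the following C++
// over V8 heap objects (a sketch; `object_map` and `function_prototype`
// mirror r3 and r4, and the Smi results match what the INSTANCE_OF builtin
// protocol expects: 0 for "is an instance", 1 for "is not"):
//
//   Object* proto = object_map->prototype();
//   while (proto != Heap::null_value()) {
//     if (proto == function_prototype) return Smi::FromInt(0);
//     proto = HeapObject::cast(proto)->map()->prototype();
//   }
//   return Smi::FromInt(1);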


void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  // The displacement is the offset of the last parameter (if any)
  // relative to the frame pointer.
  static const int kDisplacement =
      StandardFrameConstants::kCallerSPOffset - kPointerSize;

  // Check that the key is a smi.
  Label slow;
  __ BranchOnNotSmi(r1, &slow);

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor;
  __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
  __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ b(eq, &adaptor);

  // Check index against formal parameters count limit passed in
  // through register r0. Use unsigned comparison to get the negative
  // check for free.
  __ cmp(r1, r0);
  __ b(cs, &slow);

  // Read the argument from the stack and return it.
  __ sub(r3, r0, r1);
  __ add(r3, fp, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ ldr(r0, MemOperand(r3, kDisplacement));
  __ Jump(lr);

  // Arguments adaptor case: Check index against actual arguments
  // limit found in the arguments adaptor frame. Use unsigned
  // comparison to get the negative check for free.
  __ bind(&adaptor);
  __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ cmp(r1, r0);
  __ b(cs, &slow);

  // Read the argument from the adaptor frame and return it.
  __ sub(r3, r0, r1);
  __ add(r3, r2, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ ldr(r0, MemOperand(r3, kDisplacement));
  __ Jump(lr);

  // Slow-case: Handle non-smi or out-of-bounds access to arguments
  // by calling the runtime system.
  __ bind(&slow);
  __ push(r1);
  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
}
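
// Both bounds checks above use one unsigned comparison to also reject
// negative indices: the key and the argument count are both smis, so a
// negative key becomes a huge unsigned value and fails `cmp; b(cs)` for
// free. A sketch:
//
//   if (static_cast<uint32_t>(key) >= static_cast<uint32_t>(count)) {
//     goto slow;  // non-smi was handled earlier; negative or too large here
//   }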


void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
  // sp[0] : number of parameters
  // sp[4] : receiver displacement
  // sp[8] : function

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
  __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ b(eq, &adaptor_frame);

  // Get the length from the frame.
  __ ldr(r1, MemOperand(sp, 0));
  __ b(&try_allocate);

  // Patch the arguments.length and the parameters pointer.
  __ bind(&adaptor_frame);
  __ ldr(r1, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ str(r1, MemOperand(sp, 0));
  __ add(r3, r2, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ add(r3, r3, Operand(StandardFrameConstants::kCallerSPOffset));
  __ str(r3, MemOperand(sp, 1 * kPointerSize));

  // Try the new space allocation. Start out with computing the size
  // of the arguments object and the elements array in words.
  Label add_arguments_object;
  __ bind(&try_allocate);
  __ cmp(r1, Operand(0));
  __ b(eq, &add_arguments_object);
  __ mov(r1, Operand(r1, LSR, kSmiTagSize));
  __ add(r1, r1, Operand(FixedArray::kHeaderSize / kPointerSize));
  __ bind(&add_arguments_object);
  __ add(r1, r1, Operand(Heap::kArgumentsObjectSize / kPointerSize));

  // Do the allocation of both objects in one go.
  __ AllocateInNewSpace(
      r1,
      r0,
      r2,
      r3,
      &runtime,
      static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));

  // Get the arguments boilerplate from the current (global) context.
  int offset = Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX);
  __ ldr(r4, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ ldr(r4, FieldMemOperand(r4, GlobalObject::kGlobalContextOffset));
  __ ldr(r4, MemOperand(r4, offset));

  // Copy the JS object part.
  for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
    __ ldr(r3, FieldMemOperand(r4, i));
    __ str(r3, FieldMemOperand(r0, i));
  }

  // Setup the callee in-object property.
  ASSERT(Heap::arguments_callee_index == 0);
  __ ldr(r3, MemOperand(sp, 2 * kPointerSize));
  __ str(r3, FieldMemOperand(r0, JSObject::kHeaderSize));

  // Get the length (smi tagged) and set that as an in-object property too.
  ASSERT(Heap::arguments_length_index == 1);
  __ ldr(r1, MemOperand(sp, 0 * kPointerSize));
  __ str(r1, FieldMemOperand(r0, JSObject::kHeaderSize + kPointerSize));

  // If there are no actual arguments, we're done.
  Label done;
  __ cmp(r1, Operand(0));
  __ b(eq, &done);

  // Get the parameters pointer from the stack.
  __ ldr(r2, MemOperand(sp, 1 * kPointerSize));

  // Setup the elements pointer in the allocated arguments object and
  // initialize the header in the elements fixed array.
  __ add(r4, r0, Operand(Heap::kArgumentsObjectSize));
  __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
  __ LoadRoot(r3, Heap::kFixedArrayMapRootIndex);
  __ str(r3, FieldMemOperand(r4, FixedArray::kMapOffset));
  __ str(r1, FieldMemOperand(r4, FixedArray::kLengthOffset));
  __ mov(r1, Operand(r1, LSR, kSmiTagSize));  // Untag the length for the loop.

  // Copy the fixed array slots.
  Label loop;
  // Setup r4 to point to the first array slot.
  __ add(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ bind(&loop);
  // Pre-decrement r2 with kPointerSize on each iteration.
  // Pre-decrement in order to skip receiver.
  __ ldr(r3, MemOperand(r2, kPointerSize, NegPreIndex));
  // Post-increment r4 with kPointerSize on each iteration.
  __ str(r3, MemOperand(r4, kPointerSize, PostIndex));
  __ sub(r1, r1, Operand(1));
  __ cmp(r1, Operand(0));
  __ b(ne, &loop);

  // Return and remove the on-stack parameters.
  __ bind(&done);
  __ add(sp, sp, Operand(3 * kPointerSize));
  __ Ret();

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
}
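
// Allocation size computed at try_allocate above, in words (a sketch of
// the arithmetic; the constants are the real ones used in the code): when
// argc > 0, one allocation covers both objects,
//
//   size = Heap::kArgumentsObjectSize / kPointerSize       // the object
//        + FixedArray::kHeaderSize / kPointerSize + argc;  // its elements
//
// when argc == 0, the elements array is skipped entirely and only the
// arguments object itself is allocated.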


void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Just jump directly to runtime if native RegExp is not selected at
  // compile time or if the regexp entry in generated code has been turned
  // off by the runtime switch.
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
#else  // V8_INTERPRETED_REGEXP
  if (!FLAG_regexp_entry_native) {
    __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
    return;
  }

  // Stack frame on entry.
  //  sp[0]: last_match_info (expected JSArray)
  //  sp[4]: previous index
  //  sp[8]: subject string
  //  sp[12]: JSRegExp object

  static const int kLastMatchInfoOffset = 0 * kPointerSize;
  static const int kPreviousIndexOffset = 1 * kPointerSize;
  static const int kSubjectOffset = 2 * kPointerSize;
  static const int kJSRegExpOffset = 3 * kPointerSize;

  Label runtime, invoke_regexp;

  // Allocation of registers for this function. These are in callee save
  // registers and will be preserved by the call to the native RegExp code,
  // as this code is called using the normal C calling convention. When
  // calling directly from generated code the native RegExp code will not
  // do a GC and therefore the content of these registers is safe to use
  // after the call.
  Register subject = r4;
  Register regexp_data = r5;
  Register last_match_info_elements = r6;

  // Ensure that a RegExp stack is allocated.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address();
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size();
  __ mov(r0, Operand(address_of_regexp_stack_memory_size));
  __ ldr(r0, MemOperand(r0, 0));
  __ tst(r0, Operand(r0));
  __ b(eq, &runtime);

  // Check that the first argument is a JSRegExp object.
  __ ldr(r0, MemOperand(sp, kJSRegExpOffset));
  ASSERT_EQ(0, kSmiTag);
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &runtime);
  __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
  __ b(ne, &runtime);

  // Check that the RegExp has been compiled (data contains a fixed array).
  __ ldr(regexp_data, FieldMemOperand(r0, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    __ tst(regexp_data, Operand(kSmiTagMask));
    __ Check(nz, "Unexpected type for RegExp data, FixedArray expected");
    __ CompareObjectType(regexp_data, r0, r0, FIXED_ARRAY_TYPE);
    __ Check(eq, "Unexpected type for RegExp data, FixedArray expected");
  }

  // regexp_data: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
  __ ldr(r0, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset));
  __ cmp(r0, Operand(Smi::FromInt(JSRegExp::IRREGEXP)));
  __ b(ne, &runtime);

  // regexp_data: RegExp data (FixedArray)
  // Check that the number of captures fits in the static offsets vector
  // buffer.
  __ ldr(r2,
         FieldMemOperand(regexp_data, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2. This
  // uses the assumption that smis are 2 * their untagged value.
  ASSERT_EQ(0, kSmiTag);
  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
  __ add(r2, r2, Operand(2));  // r2 was a smi.
  // Check that the static offsets vector buffer is large enough.
  __ cmp(r2, Operand(OffsetsVector::kStaticOffsetsVectorSize));
  __ b(hi, &runtime);

  // r2: Number of capture registers
  // regexp_data: RegExp data (FixedArray)
  // Check that the second argument is a string.
  __ ldr(subject, MemOperand(sp, kSubjectOffset));
  __ tst(subject, Operand(kSmiTagMask));
  __ b(eq, &runtime);
  Condition is_string = masm->IsObjectStringType(subject, r0);
  __ b(NegateCondition(is_string), &runtime);
  // Get the length of the string to r3.
  __ ldr(r3, FieldMemOperand(subject, String::kLengthOffset));

  // r2: Number of capture registers
  // r3: Length of subject string as a smi
  // subject: Subject string
  // regexp_data: RegExp data (FixedArray)
  // Check that the third argument is a positive smi less than the subject
  // string length. A negative value will be greater (unsigned comparison).
  __ ldr(r0, MemOperand(sp, kPreviousIndexOffset));
  __ tst(r0, Operand(kSmiTagMask));
  __ b(ne, &runtime);
  __ cmp(r3, Operand(r0));
  __ b(ls, &runtime);

  // r2: Number of capture registers
  // subject: Subject string
  // regexp_data: RegExp data (FixedArray)
  // Check that the fourth object is a JSArray object.
  __ ldr(r0, MemOperand(sp, kLastMatchInfoOffset));
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &runtime);
  __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
  __ b(ne, &runtime);
  // Check that the JSArray is in fast case.
  __ ldr(last_match_info_elements,
         FieldMemOperand(r0, JSArray::kElementsOffset));
  __ ldr(r0, FieldMemOperand(last_match_info_elements, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
  __ cmp(r0, ip);
  __ b(ne, &runtime);
  // Check that the last match info has space for the capture registers and
  // the additional information.
  __ ldr(r0,
         FieldMemOperand(last_match_info_elements, FixedArray::kLengthOffset));
  __ add(r2, r2, Operand(RegExpImpl::kLastMatchOverhead));
  __ cmp(r2, Operand(r0, ASR, kSmiTagSize));
  __ b(gt, &runtime);

  // subject: Subject string
  // regexp_data: RegExp data (FixedArray)
  // Check the representation and encoding of the subject string.
  Label seq_string;
  __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset));
  __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
  // First check for flat string.
  __ tst(r0, Operand(kIsNotStringMask | kStringRepresentationMask));
  ASSERT_EQ(0, kStringTag | kSeqStringTag);
  __ b(eq, &seq_string);

  // subject: Subject string
  // regexp_data: RegExp data (FixedArray)
  // Check for flat cons string.
  // A flat cons string is a cons string where the second part is the empty
  // string. In that case the subject string is just the first part of the
  // cons string. Also in this case the first part of the cons string is
  // known to be a sequential string or an external string.
  ASSERT(kExternalStringTag != 0);
  ASSERT_EQ(0, kConsStringTag & kExternalStringTag);
  __ tst(r0, Operand(kIsNotStringMask | kExternalStringTag));
  __ b(ne, &runtime);
  __ ldr(r0, FieldMemOperand(subject, ConsString::kSecondOffset));
  __ LoadRoot(r1, Heap::kEmptyStringRootIndex);
  __ cmp(r0, r1);
  __ b(ne, &runtime);
  __ ldr(subject, FieldMemOperand(subject, ConsString::kFirstOffset));
  __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset));
  __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
  // Is the first part a flat string?
  ASSERT_EQ(0, kSeqStringTag);
  __ tst(r0, Operand(kStringRepresentationMask));
  __ b(nz, &runtime);

  __ bind(&seq_string);
  // subject: Subject string
  // regexp_data: RegExp data (FixedArray)
  // r0: Instance type of subject string
  ASSERT_EQ(4, kAsciiStringTag);
  ASSERT_EQ(0, kTwoByteStringTag);
  // Find the code object based on the assumptions above.
  __ and_(r0, r0, Operand(kStringEncodingMask));
  __ mov(r3, Operand(r0, ASR, 2), SetCC);
  __ ldr(r7, FieldMemOperand(regexp_data, JSRegExp::kDataAsciiCodeOffset), ne);
  __ ldr(r7, FieldMemOperand(regexp_data, JSRegExp::kDataUC16CodeOffset), eq);

  // Check that the irregexp code has been generated for the actual string
  // encoding. If it has, the field contains a code object; otherwise it
  // contains the hole.
  __ CompareObjectType(r7, r0, r0, CODE_TYPE);
  __ b(ne, &runtime);

  // r3: encoding of subject string (1 if ascii, 0 if two_byte);
  // r7: code
  // subject: Subject string
  // regexp_data: RegExp data (FixedArray)
  // Load used arguments before starting to push arguments for call to native
  // RegExp code to avoid handling changing stack height.
  __ ldr(r1, MemOperand(sp, kPreviousIndexOffset));
  __ mov(r1, Operand(r1, ASR, kSmiTagSize));

  // r1: previous index
  // r3: encoding of subject string (1 if ascii, 0 if two_byte);
  // r7: code
  // subject: Subject string
  // regexp_data: RegExp data (FixedArray)
  // All checks done. Now push arguments for native regexp code.
  __ IncrementCounter(&Counters::regexp_entry_native, 1, r0, r2);

  static const int kRegExpExecuteArguments = 7;
  __ push(lr);
  __ PrepareCallCFunction(kRegExpExecuteArguments, r0);

  // Argument 7 (sp[8]): Indicate that this is a direct call from JavaScript.
  __ mov(r0, Operand(1));
  __ str(r0, MemOperand(sp, 2 * kPointerSize));

  // Argument 6 (sp[4]): Start (high end) of backtracking stack memory area.
  __ mov(r0, Operand(address_of_regexp_stack_memory_address));
  __ ldr(r0, MemOperand(r0, 0));
  __ mov(r2, Operand(address_of_regexp_stack_memory_size));
  __ ldr(r2, MemOperand(r2, 0));
  __ add(r0, r0, Operand(r2));
  __ str(r0, MemOperand(sp, 1 * kPointerSize));

  // Argument 5 (sp[0]): static offsets vector buffer.
  __ mov(r0, Operand(ExternalReference::address_of_static_offsets_vector()));
  __ str(r0, MemOperand(sp, 0 * kPointerSize));

  // For arguments 4 and 3 get the string length, calculate the start of the
  // string data and calculate the shift of the index (0 for ASCII and 1 for
  // two byte).
  __ ldr(r0, FieldMemOperand(subject, String::kLengthOffset));
  __ mov(r0, Operand(r0, ASR, kSmiTagSize));
  ASSERT_EQ(SeqAsciiString::kHeaderSize, SeqTwoByteString::kHeaderSize);
  __ add(r9, subject, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  __ eor(r3, r3, Operand(1));
  // Argument 4 (r3): End of string data
  // Argument 3 (r2): Start of string data
  __ add(r2, r9, Operand(r1, LSL, r3));
  __ add(r3, r9, Operand(r0, LSL, r3));

  // Argument 2 (r1): Previous index.
  // Already there.

  // Argument 1 (r0): Subject string.
  __ mov(r0, subject);

  // Locate the code entry and call it.
  __ add(r7, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ CallCFunction(r7, kRegExpExecuteArguments);
  __ pop(lr);

  // r0: result
  // subject: subject string (callee saved)
  // regexp_data: RegExp data (callee saved)
  // last_match_info_elements: Last match info elements (callee saved)

  // Check the result.
  Label success;
  __ cmp(r0, Operand(NativeRegExpMacroAssembler::SUCCESS));
  __ b(eq, &success);
  Label failure;
  __ cmp(r0, Operand(NativeRegExpMacroAssembler::FAILURE));
  __ b(eq, &failure);
  __ cmp(r0, Operand(NativeRegExpMacroAssembler::EXCEPTION));
  // If not exception it can only be retry. Handle that in the runtime system.
  __ b(ne, &runtime);
  // Result must now be exception. If there is no pending exception already,
  // a stack overflow (on the backtrack stack) was detected in RegExp code,
  // but the exception has not yet been created. Handle that in the runtime
  // system.
  // TODO(592): Rerunning the RegExp to get the stack overflow exception.
  __ mov(r0, Operand(ExternalReference::the_hole_value_location()));
  __ ldr(r0, MemOperand(r0, 0));
  __ mov(r1, Operand(ExternalReference(Top::k_pending_exception_address)));
  __ ldr(r1, MemOperand(r1, 0));
  __ cmp(r0, r1);
  __ b(eq, &runtime);
  __ bind(&failure);
  // For failure and exception return null.
  __ mov(r0, Operand(Factory::null_value()));
  __ add(sp, sp, Operand(4 * kPointerSize));
  __ Ret();

  // Process the result from the native regexp code.
  __ bind(&success);
  __ ldr(r1,
         FieldMemOperand(regexp_data, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2.
  ASSERT_EQ(0, kSmiTag);
  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
  __ add(r1, r1, Operand(2));  // r1 was a smi.

  // r1: number of capture registers
  // r4: subject string
  // Store the capture count.
  __ mov(r2, Operand(r1, LSL, kSmiTagSize + kSmiShiftSize));  // To smi.
  __ str(r2, FieldMemOperand(last_match_info_elements,
                             RegExpImpl::kLastCaptureCountOffset));
  // Store last subject and last input.
  __ mov(r3, last_match_info_elements);  // Moved up to reduce latency.
  __ str(subject,
         FieldMemOperand(last_match_info_elements,
                         RegExpImpl::kLastSubjectOffset));
  __ RecordWrite(r3, Operand(RegExpImpl::kLastSubjectOffset), r2, r7);
  __ str(subject,
         FieldMemOperand(last_match_info_elements,
                         RegExpImpl::kLastInputOffset));
  __ mov(r3, last_match_info_elements);
  __ RecordWrite(r3, Operand(RegExpImpl::kLastInputOffset), r2, r7);

  // Get the static offsets vector filled by the native regexp code.
  ExternalReference address_of_static_offsets_vector =
      ExternalReference::address_of_static_offsets_vector();
  __ mov(r2, Operand(address_of_static_offsets_vector));

  // r1: number of capture registers
  // r2: offsets vector
  Label next_capture, done;
  // Capture register counter starts from the number of capture registers
  // and counts down until wrapping after zero.
  __ add(r0,
         last_match_info_elements,
         Operand(RegExpImpl::kFirstCaptureOffset - kHeapObjectTag));
  __ bind(&next_capture);
  __ sub(r1, r1, Operand(1), SetCC);
  __ b(mi, &done);
  // Read the value from the static offsets vector buffer.
  __ ldr(r3, MemOperand(r2, kPointerSize, PostIndex));
  // Store the smi value in the last match info.
  __ mov(r3, Operand(r3, LSL, kSmiTagSize));
  __ str(r3, MemOperand(r0, kPointerSize, PostIndex));
  __ jmp(&next_capture);
  __ bind(&done);

  // Return last match info.
  __ ldr(r0, MemOperand(sp, kLastMatchInfoOffset));
  __ add(sp, sp, Operand(4 * kPointerSize));
  __ Ret();

  // Do the runtime call to execute the regexp.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
#endif  // V8_INTERPRETED_REGEXP
}
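
// The capture-copy loop above amounts to the following (a sketch; the
// offsets vector holds untagged indices produced by the native code, and
// each value is smi-tagged into the last match info elements array):
//
//   for (int i = 0; i < number_of_capture_registers; i++) {
//     last_match_info_elements[first_capture + i] =
//         Smi::FromInt(offsets_vector[i]);
//   }
//
// `first_capture` is illustrative; RegExpImpl::kFirstCaptureOffset is the
// authoritative location in the elements array.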


void CallFunctionStub::Generate(MacroAssembler* masm) {
  Label slow;

  // If the receiver might be a value (string, number or boolean) check for
  // this and box it if it is.
  if (ReceiverMightBeValue()) {
    // Get the receiver from the stack.
    // function, receiver [, arguments]
    Label receiver_is_value, receiver_is_js_object;
    __ ldr(r1, MemOperand(sp, argc_ * kPointerSize));

    // Check if receiver is a smi (which is a number value).
    __ BranchOnSmi(r1, &receiver_is_value);

    // Check if the receiver is a valid JS object.
    __ CompareObjectType(r1, r2, r2, FIRST_JS_OBJECT_TYPE);
    __ b(ge, &receiver_is_js_object);

    // Call the runtime to box the value.
    __ bind(&receiver_is_value);
    __ EnterInternalFrame();
    __ push(r1);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS);
    __ LeaveInternalFrame();
    __ str(r0, MemOperand(sp, argc_ * kPointerSize));

    __ bind(&receiver_is_js_object);
  }

  // Get the function to call from the stack.
  // function, receiver [, arguments]
  __ ldr(r1, MemOperand(sp, (argc_ + 1) * kPointerSize));

  // Check that the function is really a JavaScript function.
  // r1: pushed function (to be verified)
  __ BranchOnSmi(r1, &slow);
  // Get the map of the function object.
  __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
  __ b(ne, &slow);

  // Fast-case: Invoke the function now.
  // r1: pushed function
  ParameterCount actual(argc_);
  __ InvokeFunction(r1, actual, JUMP_FUNCTION);

  // Slow-case: Non-function called.
  __ bind(&slow);
  // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
  // of the original receiver from the call site).
  __ str(r1, MemOperand(sp, argc_ * kPointerSize));
  __ mov(r0, Operand(argc_));  // Setup the number of arguments.
  __ mov(r2, Operand(0));
  __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION);
  __ Jump(Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline)),
          RelocInfo::CODE_TARGET);
}
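
// The boxing step above mirrors ToObject semantics for call receivers: a
// primitive receiver (smi, string, boolean, ...) is replaced on the stack
// by its wrapper object before the call. Roughly (a sketch of the
// semantics, not an actual V8 API):
//
//   if (!receiver.IsJSObject()) {
//     receiver = TO_OBJECT(receiver);  // via the TO_OBJECT builtin
//   }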


// Unfortunately you have to run without snapshots to see most of these
// names in the profile since most compare stubs end up in the snapshot.
const char* CompareStub::GetName() {
  ASSERT((lhs_.is(r0) && rhs_.is(r1)) ||
         (lhs_.is(r1) && rhs_.is(r0)));

  if (name_ != NULL) return name_;
  const int kMaxNameLength = 100;
  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
  if (name_ == NULL) return "OOM";

  const char* cc_name;
  switch (cc_) {
    case lt: cc_name = "LT"; break;
    case gt: cc_name = "GT"; break;
    case le: cc_name = "LE"; break;
    case ge: cc_name = "GE"; break;
    case eq: cc_name = "EQ"; break;
    case ne: cc_name = "NE"; break;
    default: cc_name = "UnknownCondition"; break;
  }

  const char* lhs_name = lhs_.is(r0) ? "_r0" : "_r1";
  const char* rhs_name = rhs_.is(r0) ? "_r0" : "_r1";

  const char* strict_name = "";
  if (strict_ && (cc_ == eq || cc_ == ne)) {
    strict_name = "_STRICT";
  }

  const char* never_nan_nan_name = "";
  if (never_nan_nan_ && (cc_ == eq || cc_ == ne)) {
    never_nan_nan_name = "_NO_NAN";
  }

  const char* include_number_compare_name = "";
  if (!include_number_compare_) {
    include_number_compare_name = "_NO_NUMBER";
  }

  OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
               "CompareStub_%s%s%s%s%s%s",
               cc_name,
               lhs_name,
               rhs_name,
               strict_name,
               never_nan_nan_name,
               include_number_compare_name);
  return name_;
}


int CompareStub::MinorKey() {
  // Encode the parameters in a unique 16 bit value. To avoid duplicate
  // stubs the never-NaN-NaN condition is only taken into account if the
  // condition is equals.
  ASSERT((static_cast<unsigned>(cc_) >> 28) < (1 << 12));
  ASSERT((lhs_.is(r0) && rhs_.is(r1)) ||
         (lhs_.is(r1) && rhs_.is(r0)));
  return ConditionField::encode(static_cast<unsigned>(cc_) >> 28)
         | RegisterField::encode(lhs_.is(r0))
         | StrictField::encode(strict_)
         | NeverNanNanField::encode(cc_ == eq ? never_nan_nan_ : false)
         | IncludeNumberCompareField::encode(include_number_compare_);
}
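
// Resulting minor key layout (a sketch; the *Field typedefs in the header
// are authoritative): condition bits taken from cc_ >> 28, one bit recording
// whether lhs_ is r0, then the strict, never-NaN-NaN and
// include-number-compare flags. Fields that cannot affect the generated code
// are canonicalized (never_nan_nan_ is forced to false when cc_ != eq) so
// that equivalent stubs share a single code cache entry.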


// -------------------------------------------------------------------------
// StringCharCodeAtGenerator

void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
  Label flat_string;
  Label ascii_string;
  Label got_char_code;

  // If the receiver is a smi, trigger the non-string case.
  __ BranchOnSmi(object_, receiver_not_string_);

  // Fetch the instance type of the receiver into the result register.
  __ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
  __ ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
  // If the receiver is not a string, trigger the non-string case.
  __ tst(result_, Operand(kIsNotStringMask));
  __ b(ne, receiver_not_string_);

  // If the index is not a smi, trigger the non-smi case.
  __ BranchOnNotSmi(index_, &index_not_smi_);

  // Put the smi-tagged index into the scratch register.
  __ mov(scratch_, index_);
  __ bind(&got_smi_index_);

  // Check for index out of range.
  __ ldr(ip, FieldMemOperand(object_, String::kLengthOffset));
  __ cmp(ip, Operand(scratch_));
  __ b(ls, index_out_of_range_);

  // We need special handling for non-flat strings.
  ASSERT(kSeqStringTag == 0);
  __ tst(result_, Operand(kStringRepresentationMask));
  __ b(eq, &flat_string);

  // Handle non-flat strings.
  __ tst(result_, Operand(kIsConsStringMask));
  __ b(eq, &call_runtime_);

  // ConsString.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ ldr(result_, FieldMemOperand(object_, ConsString::kSecondOffset));
  __ LoadRoot(ip, Heap::kEmptyStringRootIndex);
  __ cmp(result_, Operand(ip));
  __ b(ne, &call_runtime_);
  // Get the first of the two strings and load its instance type.
  __ ldr(object_, FieldMemOperand(object_, ConsString::kFirstOffset));
  __ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
  __ ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
  // If the first cons component is also non-flat, then go to runtime.
  ASSERT(kSeqStringTag == 0);
  __ tst(result_, Operand(kStringRepresentationMask));
  __ b(nz, &call_runtime_);

  // Check for 1-byte or 2-byte string.
  __ bind(&flat_string);
  ASSERT(kAsciiStringTag != 0);
  __ tst(result_, Operand(kStringEncodingMask));
  __ b(nz, &ascii_string);

  // 2-byte string.
  // Load the 2-byte character code into the result register. We can
  // add without shifting since the smi tag size is the log2 of the
  // number of bytes in a two-byte character.
  ASSERT(kSmiTag == 0 && kSmiTagSize == 1 && kSmiShiftSize == 0);
  __ add(scratch_, object_, Operand(scratch_));
  __ ldrh(result_, FieldMemOperand(scratch_, SeqTwoByteString::kHeaderSize));
  __ jmp(&got_char_code);

  // ASCII string.
  // Load the byte into the result register.
  __ bind(&ascii_string);
  __ add(scratch_, object_, Operand(scratch_, LSR, kSmiTagSize));
  __ ldrb(result_, FieldMemOperand(scratch_, SeqAsciiString::kHeaderSize));

  __ bind(&got_char_code);
  __ mov(result_, Operand(result_, LSL, kSmiTagSize));
  __ bind(&exit_);
}
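
// Why the two index computations above work (a sketch, relying on the
// asserted kSmiTagSize == 1): a smi-tagged index holds 2 * index in its
// payload, so
//
//   two-byte string: byte offset = 2 * index == scratch_ (smi used as-is)
//   ascii string:    byte offset = 1 * index == scratch_ >> 1 (LSR untags)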


void StringCharCodeAtGenerator::GenerateSlow(
    MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
  __ Abort("Unexpected fallthrough to CharCodeAt slow case");

  // Index is not a smi.
  __ bind(&index_not_smi_);
  // If index is a heap number, try converting it to an integer.
  __ CheckMap(index_,
              scratch_,
              Heap::kHeapNumberMapRootIndex,
              index_not_number_,
              true);
  call_helper.BeforeCall(masm);
  __ Push(object_, index_);
  __ push(index_);  // Consumed by runtime conversion function.
  if (index_flags_ == STRING_INDEX_IS_NUMBER) {
    __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
  } else {
    ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
    // NumberToSmi discards numbers that are not exact integers.
    __ CallRuntime(Runtime::kNumberToSmi, 1);
  }
  if (!scratch_.is(r0)) {
    // Save the conversion result before the pop instructions below
    // have a chance to overwrite it.
    __ mov(scratch_, r0);
  }
  __ pop(index_);
  __ pop(object_);
  // Reload the instance type.
  __ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
  __ ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
  call_helper.AfterCall(masm);
  // If index is still not a smi, it must be out of range.
  __ BranchOnNotSmi(scratch_, index_out_of_range_);
  // Otherwise, return to the fast path.
  __ jmp(&got_smi_index_);

  // Call runtime. We get here when the receiver is a string and the
  // index is a number, but the code for getting the actual character
  // is too complex (e.g., when the string needs to be flattened).
  __ bind(&call_runtime_);
  call_helper.BeforeCall(masm);
  __ Push(object_, index_);
  __ CallRuntime(Runtime::kStringCharCodeAt, 2);
  if (!result_.is(r0)) {
    __ mov(result_, r0);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort("Unexpected fallthrough from CharCodeAt slow case");
}


// -------------------------------------------------------------------------
// StringCharFromCodeGenerator

void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
  // Fast case of Heap::LookupSingleCharacterStringFromCode.
  ASSERT(kSmiTag == 0);
  ASSERT(kSmiShiftSize == 0);
  ASSERT(IsPowerOf2(String::kMaxAsciiCharCode + 1));
  __ tst(code_,
         Operand(kSmiTagMask |
                 ((~String::kMaxAsciiCharCode) << kSmiTagSize)));
  __ b(nz, &slow_case_);

  __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
  // At this point the code register contains the smi-tagged ascii char code.
  ASSERT(kSmiTag == 0);
  __ add(result_, result_, Operand(code_, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ ldr(result_, FieldMemOperand(result_, FixedArray::kHeaderSize));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(result_, Operand(ip));
  __ b(eq, &slow_case_);
  __ bind(&exit_);
}
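
// The fast path above, in C terms (a sketch; 'cache' stands for the
// FixedArray behind kSingleCharacterStringCacheRootIndex): a smi-tagged
// code is 2 * code, so shifting it left by kPointerSizeLog2 - kSmiTagSize
// (i.e. by one) scales it to a byte offset of code * kPointerSize.
//
//   Object* result = cache->get(code);                 // one load
//   if (result == Heap::undefined_value()) goto slow;  // cache miss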


void StringCharFromCodeGenerator::GenerateSlow(
    MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
  __ Abort("Unexpected fallthrough to CharFromCode slow case");

  __ bind(&slow_case_);
  call_helper.BeforeCall(masm);
  __ push(code_);
  __ CallRuntime(Runtime::kCharFromCode, 1);
  if (!result_.is(r0)) {
    __ mov(result_, r0);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort("Unexpected fallthrough from CharFromCode slow case");
}


// -------------------------------------------------------------------------
// StringCharAtGenerator

void StringCharAtGenerator::GenerateFast(MacroAssembler* masm) {
  char_code_at_generator_.GenerateFast(masm);
  char_from_code_generator_.GenerateFast(masm);
}


void StringCharAtGenerator::GenerateSlow(
    MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
  char_code_at_generator_.GenerateSlow(masm, call_helper);
  char_from_code_generator_.GenerateSlow(masm, call_helper);
}
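
// Typical usage pattern for these generator pairs (a sketch only; the label
// names and helper instance are hypothetical, and the constructor arguments
// are elided rather than guessed): the fast path is emitted inline and the
// slow path is emitted out of line, jumping back into the fast path once it
// has recovered.
//
//   StringCharAtGenerator generator(...);   // registers and failure labels
//   generator.GenerateFast(masm);           // inline fast path
//   __ b(&done);
//   generator.GenerateSlow(masm, helper);   // out-of-line conversions
//   __ bind(&done);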


void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
                                          Register dest,
                                          Register src,
                                          Register count,
                                          Register scratch,
                                          bool ascii) {
  Label loop;
  Label done;
  // This loop just copies one character at a time, as it is only used for
  // very short strings.
  if (!ascii) {
    __ add(count, count, Operand(count), SetCC);
  } else {
    __ cmp(count, Operand(0));
  }
  __ b(eq, &done);

  __ bind(&loop);
  __ ldrb(scratch, MemOperand(src, 1, PostIndex));
  // Perform the sub between the load and the dependent store to give the
  // load time to complete.
  __ sub(count, count, Operand(1), SetCC);
  __ strb(scratch, MemOperand(dest, 1, PostIndex));
  // Loop unless that was the last iteration.
  __ b(gt, &loop);

  __ bind(&done);
}
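
// Equivalent C logic (a sketch; for two-byte strings the count is doubled
// up front via count += count, which converts characters to bytes and tests
// for zero in the same instruction):
//
//   static void CopyChars(uint8_t* dest, const uint8_t* src, int count,
//                         bool ascii) {
//     int bytes = ascii ? count : count * 2;
//     while (bytes-- > 0) *dest++ = *src++;
//   }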


enum CopyCharactersFlags {
  COPY_ASCII = 1,
  DEST_ALWAYS_ALIGNED = 2
};


void StringHelper::GenerateCopyCharactersLong(MacroAssembler* masm,
                                              Register dest,
                                              Register src,
                                              Register count,
                                              Register scratch1,
                                              Register scratch2,
                                              Register scratch3,
                                              Register scratch4,
                                              Register scratch5,
                                              int flags) {
  bool ascii = (flags & COPY_ASCII) != 0;
  bool dest_always_aligned = (flags & DEST_ALWAYS_ALIGNED) != 0;

  if (dest_always_aligned && FLAG_debug_code) {
    // Check that destination is actually word aligned if the flag says
    // that it is.
    __ tst(dest, Operand(kPointerAlignmentMask));
    __ Check(eq, "Destination of copy not aligned.");
  }

  const int kReadAlignment = 4;
  const int kReadAlignmentMask = kReadAlignment - 1;
  // Ensure that reading an entire aligned word containing the last character
  // of a string will not read outside the allocated area (because we pad up
  // to kObjectAlignment).
  ASSERT(kObjectAlignment >= kReadAlignment);
  // Assumes word reads and writes are little endian.
  // Nothing to do for zero characters.
  Label done;
  if (!ascii) {
    __ add(count, count, Operand(count), SetCC);
  } else {
    __ cmp(count, Operand(0));
  }
  __ b(eq, &done);

  // Assume that you cannot read (or write) unaligned.
  Label byte_loop;
  // Must copy at least eight bytes, otherwise just do it one byte at a time.
  __ cmp(count, Operand(8));
  __ add(count, dest, Operand(count));
  Register limit = count;  // Read until src equals this.
  __ b(lt, &byte_loop);

  if (!dest_always_aligned) {
    // Align dest by byte copying. Copies between zero and three bytes.
    __ and_(scratch4, dest, Operand(kReadAlignmentMask), SetCC);
    Label dest_aligned;
    __ b(eq, &dest_aligned);
    __ cmp(scratch4, Operand(2));
    __ ldrb(scratch1, MemOperand(src, 1, PostIndex));
    __ ldrb(scratch2, MemOperand(src, 1, PostIndex), le);
    __ ldrb(scratch3, MemOperand(src, 1, PostIndex), lt);
    __ strb(scratch1, MemOperand(dest, 1, PostIndex));
    __ strb(scratch2, MemOperand(dest, 1, PostIndex), le);
    __ strb(scratch3, MemOperand(dest, 1, PostIndex), lt);
    __ bind(&dest_aligned);
  }

  Label simple_loop;

  __ sub(scratch4, dest, Operand(src));
  __ and_(scratch4, scratch4, Operand(0x03), SetCC);
  __ b(eq, &simple_loop);
  // The shift register holds the number of bits in a source word that
  // must be combined with bits in the next source word in order
  // to create a destination word.

  // Complex loop for src/dst that are not aligned the same way.
  {
    Label loop;
    __ mov(scratch4, Operand(scratch4, LSL, 3));
    Register left_shift = scratch4;
    __ and_(src, src, Operand(~3));  // Round down to load previous word.
    __ ldr(scratch1, MemOperand(src, 4, PostIndex));
    // Store the "shift" most significant bits of scratch in the least
    // significant bits (i.e., shift down by (32-shift)).
    __ rsb(scratch2, left_shift, Operand(32));
    Register right_shift = scratch2;
    __ mov(scratch1, Operand(scratch1, LSR, right_shift));

    __ bind(&loop);
    __ ldr(scratch3, MemOperand(src, 4, PostIndex));
    __ sub(scratch5, limit, Operand(dest));
    __ orr(scratch1, scratch1, Operand(scratch3, LSL, left_shift));
    __ str(scratch1, MemOperand(dest, 4, PostIndex));
    __ mov(scratch1, Operand(scratch3, LSR, right_shift));
    // Loop if four or more bytes left to copy.
    // Compare to eight, because we did the subtract before increasing dst.
    __ sub(scratch5, scratch5, Operand(8), SetCC);
    __ b(ge, &loop);
  }
  // There are now between zero and three bytes left to copy (negative that
  // number is in scratch5), and between one and three bytes already read into
  // scratch1 (eight times that number in scratch4). We may have read past
  // the end of the string, but because objects are aligned, we have not read
  // past the end of the object.
  // Find the minimum of remaining characters to move and preloaded characters
  // and write those as bytes.
  __ add(scratch5, scratch5, Operand(4), SetCC);
  __ b(eq, &done);
  __ cmp(scratch4, Operand(scratch5, LSL, 3), ne);
  // Move minimum of bytes read and bytes left to copy to scratch4.
  __ mov(scratch5, Operand(scratch4, LSR, 3), LeaveCC, lt);
  // Between one and three (value in scratch5) characters already read into
  // scratch ready to write.
  __ cmp(scratch5, Operand(2));
  __ strb(scratch1, MemOperand(dest, 1, PostIndex));
  __ mov(scratch1, Operand(scratch1, LSR, 8), LeaveCC, ge);
  __ strb(scratch1, MemOperand(dest, 1, PostIndex), ge);
  __ mov(scratch1, Operand(scratch1, LSR, 8), LeaveCC, gt);
  __ strb(scratch1, MemOperand(dest, 1, PostIndex), gt);
  // Copy any remaining bytes.
  __ b(&byte_loop);

  // Simple loop.
  // Copy words from src to dst, until less than four bytes left.
  // Both src and dest are word aligned.
  __ bind(&simple_loop);
  {
    Label loop;
    __ bind(&loop);
    __ ldr(scratch1, MemOperand(src, 4, PostIndex));
    __ sub(scratch3, limit, Operand(dest));
    __ str(scratch1, MemOperand(dest, 4, PostIndex));
    // Compare to 8, not 4, because we do the subtraction before increasing
    // dest.
    __ cmp(scratch3, Operand(8));
    __ b(ge, &loop);
  }

  // Copy bytes from src to dst until dst hits limit.
  __ bind(&byte_loop);
  __ cmp(dest, Operand(limit));
  __ ldrb(scratch1, MemOperand(src, 1, PostIndex), lt);
  __ b(ge, &done);
  __ strb(scratch1, MemOperand(dest, 1, PostIndex));
  __ b(&byte_loop);

  __ bind(&done);
}
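
// The misaligned word loop above implements, in effect (a pseudo-C sketch
// with uint32_t words, little endian; left_shift is 8 * ((dest - src) & 3)
// and right_shift is 32 - left_shift):
//
//   uint32_t carry = *src_word++ >> right_shift;
//   while (/* at least four bytes remain */) {
//     uint32_t w = *src_word++;
//     *dest_word++ = carry | (w << left_shift);
//     carry = w >> right_shift;
//   }
//   // The 1-3 carried bytes are then written out one strb at a time.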


void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
                                                        Register c1,
                                                        Register c2,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3,
                                                        Register scratch4,
                                                        Register scratch5,
                                                        Label* not_found) {
  // Register scratch3 is the general scratch register in this function.
  Register scratch = scratch3;

  // Make sure that both characters are not digits, as such strings have a
  // different hash algorithm. Don't try to look for these in the symbol
  // table.
  Label not_array_index;
  __ sub(scratch, c1, Operand(static_cast<int>('0')));
  __ cmp(scratch, Operand(static_cast<int>('9' - '0')));
  __ b(hi, &not_array_index);
  __ sub(scratch, c2, Operand(static_cast<int>('0')));
  __ cmp(scratch, Operand(static_cast<int>('9' - '0')));

  // If the check failed, combine both characters into a single halfword.
  // This is required by the contract of the method: code at the
  // not_found branch expects this combination in the c1 register.
  __ orr(c1, c1, Operand(c2, LSL, kBitsPerByte), LeaveCC, ls);
  __ b(ls, not_found);

  __ bind(&not_array_index);
  // Calculate the two character string hash.
  Register hash = scratch1;
  StringHelper::GenerateHashInit(masm, hash, c1);
  StringHelper::GenerateHashAddCharacter(masm, hash, c2);
  StringHelper::GenerateHashGetHash(masm, hash);

  // Collect the two characters in a register.
  Register chars = c1;
  __ orr(chars, chars, Operand(c2, LSL, kBitsPerByte));

  // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
  // hash: hash of two character string.

  // Load the symbol table.
  // Load address of first element of the symbol table.
  Register symbol_table = c2;
  __ LoadRoot(symbol_table, Heap::kSymbolTableRootIndex);

  // Load the undefined value.
  Register undefined = scratch4;
  __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);

  // Calculate capacity mask from the symbol table capacity.
  Register mask = scratch2;
  __ ldr(mask, FieldMemOperand(symbol_table, SymbolTable::kCapacityOffset));
  __ mov(mask, Operand(mask, ASR, 1));
  __ sub(mask, mask, Operand(1));

  // Calculate untagged address of the first element of the symbol table.
  Register first_symbol_table_element = symbol_table;
  __ add(first_symbol_table_element, symbol_table,
         Operand(SymbolTable::kElementsStartOffset - kHeapObjectTag));

  // Registers
  // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
  // hash: hash of two character string
  // mask: capacity mask
  // first_symbol_table_element: address of the first element of
  // the symbol table
  // scratch: -

  // Perform a number of probes in the symbol table.
  static const int kProbes = 4;
  Label found_in_symbol_table;
  Label next_probe[kProbes];
  for (int i = 0; i < kProbes; i++) {
    Register candidate = scratch5;  // Scratch register contains candidate.

    // Calculate entry in symbol table.
    if (i > 0) {
      __ add(candidate, hash, Operand(SymbolTable::GetProbeOffset(i)));
    } else {
      __ mov(candidate, hash);
    }

    __ and_(candidate, candidate, Operand(mask));

    // Load the entry from the symbol table.
    ASSERT_EQ(1, SymbolTable::kEntrySize);
    __ ldr(candidate,
           MemOperand(first_symbol_table_element,
                      candidate,
                      LSL,
                      kPointerSizeLog2));

    // If the entry is undefined no string with this hash can be found.
    __ cmp(candidate, undefined);
    __ b(eq, not_found);

    // If the length is not 2 the string is not a candidate.
    __ ldr(scratch, FieldMemOperand(candidate, String::kLengthOffset));
    __ cmp(scratch, Operand(Smi::FromInt(2)));
    __ b(ne, &next_probe[i]);

    // Check that the candidate is a non-external ascii string.
    __ ldr(scratch, FieldMemOperand(candidate, HeapObject::kMapOffset));
    __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
    __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch,
                                              &next_probe[i]);

    // Check if the two characters match.
    // Assumes that word load is little endian.
    __ ldrh(scratch, FieldMemOperand(candidate, SeqAsciiString::kHeaderSize));
    __ cmp(chars, scratch);
    __ b(eq, &found_in_symbol_table);
    __ bind(&next_probe[i]);
  }

  // No matching 2 character string found by probing.
  __ jmp(not_found);

  // Scratch register contains result when we fall through to here.
  Register result = scratch;
  __ bind(&found_in_symbol_table);
  __ Move(r0, result);
}
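
// The probe sequence above, in pseudo-C (a sketch; IsSeqAsciiOfLength2 and
// FirstTwoChars stand in for the inline checks and are not real helpers):
//
//   for (int i = 0; i < kProbes; i++) {
//     int entry = (i == 0 ? hash : hash + SymbolTable::GetProbeOffset(i))
//                 & mask;
//     Object* candidate = table[entry];
//     if (candidate == undefined) return not_found;  // hash not present
//     if (IsSeqAsciiOfLength2(candidate) &&
//         FirstTwoChars(candidate) == chars) return candidate;
//   }
//   return not_found;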


void StringHelper::GenerateHashInit(MacroAssembler* masm,
                                    Register hash,
                                    Register character) {
  // hash = character + (character << 10);
  __ add(hash, character, Operand(character, LSL, 10));
  // hash ^= hash >> 6;
  __ eor(hash, hash, Operand(hash, ASR, 6));
}


void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
                                            Register hash,
                                            Register character) {
  // hash += character;
  __ add(hash, hash, Operand(character));
  // hash += hash << 10;
  __ add(hash, hash, Operand(hash, LSL, 10));
  // hash ^= hash >> 6;
  __ eor(hash, hash, Operand(hash, ASR, 6));
}


void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
                                       Register hash) {
  // hash += hash << 3;
  __ add(hash, hash, Operand(hash, LSL, 3));
  // hash ^= hash >> 11;
  __ eor(hash, hash, Operand(hash, ASR, 11));
  // hash += hash << 15;
  __ add(hash, hash, Operand(hash, LSL, 15), SetCC);

  // if (hash == 0) hash = 27;
  __ mov(hash, Operand(27), LeaveCC, nz);
}
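
// Taken together, the three helpers above implement a one-at-a-time style
// string hash. A sketch assembling the per-step comments above into one
// C++ function (the function name is illustrative, not from this file):
//
//   static uint32_t TwoCharHash(uint8_t c1, uint8_t c2) {
//     uint32_t hash = c1 + (c1 << 10);   // GenerateHashInit
//     hash ^= hash >> 6;
//     hash += c2;                        // GenerateHashAddCharacter
//     hash += hash << 10;
//     hash ^= hash >> 6;
//     hash += hash << 3;                 // GenerateHashGetHash
//     hash ^= hash >> 11;
//     hash += hash << 15;
//     return hash != 0 ? hash : 27;      // never return a zero hash
//   }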


void SubStringStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  // lr: return address
  // sp[0]: to
  // sp[4]: from
  // sp[8]: string

  // This stub is called from the native-call %_SubString(...), so
  // nothing can be assumed about the arguments. It is tested that:
  // "string" is a sequential string,
  // both "from" and "to" are smis, and
  // 0 <= from <= to <= string.length.
  // If any of these assumptions fail, we call the runtime system.

  static const int kToOffset = 0 * kPointerSize;
  static const int kFromOffset = 1 * kPointerSize;
  static const int kStringOffset = 2 * kPointerSize;

  // Check bounds and smi-ness.
  __ ldr(r7, MemOperand(sp, kToOffset));
  __ ldr(r6, MemOperand(sp, kFromOffset));
  ASSERT_EQ(0, kSmiTag);
  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
  // I.e., arithmetic shift right by one un-smi-tags.
  __ mov(r2, Operand(r7, ASR, 1), SetCC);
  __ mov(r3, Operand(r6, ASR, 1), SetCC, cc);
  // If either r7 or r6 had the smi tag bit set, then carry is set now.
  __ b(cs, &runtime);  // Either "from" or "to" is not a smi.
  __ b(mi, &runtime);  // From is negative.

  __ sub(r2, r2, Operand(r3), SetCC);
  __ b(mi, &runtime);  // Fail if from > to.
  // Special handling of sub-strings of length 1 and 2. One character strings
  // are handled in the runtime system (looked up in the single character
  // cache). Two character strings are looked for in the symbol cache.
  __ cmp(r2, Operand(2));
  __ b(lt, &runtime);

  // r2: length
  // r3: from index (untagged smi)
  // r6: from (smi)
  // r7: to (smi)

  // Make sure first argument is a sequential (or flat) string.
  __ ldr(r5, MemOperand(sp, kStringOffset));
  ASSERT_EQ(0, kSmiTag);
  __ tst(r5, Operand(kSmiTagMask));
  __ b(eq, &runtime);
  Condition is_string = masm->IsObjectStringType(r5, r1);
  __ b(NegateCondition(is_string), &runtime);

  // r1: instance type
  // r2: length
  // r3: from index (untagged smi)
  // r5: string
  // r6: from (smi)
  // r7: to (smi)
  Label seq_string;
  __ and_(r4, r1, Operand(kStringRepresentationMask));
  ASSERT(kSeqStringTag < kConsStringTag);
  ASSERT(kExternalStringTag > kConsStringTag);
  __ cmp(r4, Operand(kConsStringTag));
  __ b(gt, &runtime);  // External strings go to runtime.
  __ b(lt, &seq_string);  // Sequential strings are handled directly.

  // Cons string. Try to recurse (once) on the first substring.
  // (This adds a little more generality than necessary to handle flattened
  // cons strings, but not much).
  __ ldr(r5, FieldMemOperand(r5, ConsString::kFirstOffset));
  __ ldr(r4, FieldMemOperand(r5, HeapObject::kMapOffset));
  __ ldrb(r1, FieldMemOperand(r4, Map::kInstanceTypeOffset));
  __ tst(r1, Operand(kStringRepresentationMask));
  ASSERT_EQ(0, kSeqStringTag);
  __ b(ne, &runtime);  // Cons and External strings go to runtime.

  // Definitely a sequential string.
  __ bind(&seq_string);

  // r1: instance type.
  // r2: length
  // r3: from index (untagged smi)
  // r5: string
  // r6: from (smi)
  // r7: to (smi)
  __ ldr(r4, FieldMemOperand(r5, String::kLengthOffset));
  __ cmp(r4, Operand(r7));
  __ b(lt, &runtime);  // Fail if to > length.

  // r1: instance type.
  // r2: result string length.
  // r3: from index (untagged smi)
  // r5: string.
  // r6: from offset (smi)
  // Check for flat ascii string.
  Label non_ascii_flat;
  __ tst(r1, Operand(kStringEncodingMask));
  ASSERT_EQ(0, kTwoByteStringTag);
  __ b(eq, &non_ascii_flat);

  Label result_longer_than_two;
  __ cmp(r2, Operand(2));
  __ b(gt, &result_longer_than_two);

  // Sub string of length 2 requested.
  // Get the two characters forming the sub string.
  __ add(r5, r5, Operand(r3));
  __ ldrb(r3, FieldMemOperand(r5, SeqAsciiString::kHeaderSize));
  __ ldrb(r4, FieldMemOperand(r5, SeqAsciiString::kHeaderSize + 1));

  // Try to look up the two character string in the symbol table.
  Label make_two_character_string;
  StringHelper::GenerateTwoCharacterSymbolTableProbe(
      masm, r3, r4, r1, r5, r6, r7, r9, &make_two_character_string);
  __ IncrementCounter(&Counters::sub_string_native, 1, r3, r4);
  __ add(sp, sp, Operand(3 * kPointerSize));
  __ Ret();

  // r2: result string length.
  // r3: two characters combined into halfword in little endian byte order.
  __ bind(&make_two_character_string);
  __ AllocateAsciiString(r0, r2, r4, r5, r9, &runtime);
  __ strh(r3, FieldMemOperand(r0, SeqAsciiString::kHeaderSize));
  __ IncrementCounter(&Counters::sub_string_native, 1, r3, r4);
  __ add(sp, sp, Operand(3 * kPointerSize));
  __ Ret();

  __ bind(&result_longer_than_two);

  // Allocate the result.
  __ AllocateAsciiString(r0, r2, r3, r4, r1, &runtime);

  // r0: result string.
  // r2: result string length.
  // r5: string.
  // r6: from offset (smi)
  // Locate first character of result.
  __ add(r1, r0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // Locate 'from' character of string.
  __ add(r5, r5, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  __ add(r5, r5, Operand(r6, ASR, 1));

  // r0: result string.
  // r1: first character of result string.
  // r2: result string length.
  // r5: first character of sub string to copy.
  ASSERT_EQ(0, SeqAsciiString::kHeaderSize & kObjectAlignmentMask);
  StringHelper::GenerateCopyCharactersLong(masm, r1, r5, r2, r3, r4, r6, r7, r9,
                                           COPY_ASCII | DEST_ALWAYS_ALIGNED);
  __ IncrementCounter(&Counters::sub_string_native, 1, r3, r4);
  __ add(sp, sp, Operand(3 * kPointerSize));
  __ Ret();

  __ bind(&non_ascii_flat);
  // r2: result string length.
  // r5: string.
  // r6: from offset (smi)
  // Check for flat two byte string.

  // Allocate the result.
  __ AllocateTwoByteString(r0, r2, r1, r3, r4, &runtime);

  // r0: result string.
  // r2: result string length.
  // r5: string.
  // Locate first character of result.
  __ add(r1, r0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // Locate 'from' character of string.
  __ add(r5, r5, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // As "from" is a smi it is 2 times the value, which matches the size of a
  // two byte character.
  __ add(r5, r5, Operand(r6));

  // r0: result string.
  // r1: first character of result.
  // r2: result length.
  // r5: first character of string to copy.
  ASSERT_EQ(0, SeqTwoByteString::kHeaderSize & kObjectAlignmentMask);
  StringHelper::GenerateCopyCharactersLong(masm, r1, r5, r2, r3, r4, r6, r7, r9,
                                           DEST_ALWAYS_ALIGNED);
  __ IncrementCounter(&Counters::sub_string_native, 1, r3, r4);
  __ add(sp, sp, Operand(3 * kPointerSize));
  __ Ret();

  // Just jump to runtime to create the sub string.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kSubString, 3, 1);
}
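
// Decision summary for SubStringStub above (a sketch of the control flow):
//   non-smi from/to, or from > to   -> runtime
//   result length < 2               -> runtime (single character cache)
//   result length == 2              -> symbol table probe, else allocate
//   flat ascii string               -> AllocateAsciiString + byte copy
//   flat two-byte string            -> AllocateTwoByteString + copy
//   cons wrapping a flat string     -> recurse once on first component
//   external or deep cons string    -> runtime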


void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
                                                        Register left,
                                                        Register right,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3,
                                                        Register scratch4) {
  Label compare_lengths;
  // Find minimum length and length difference.
  __ ldr(scratch1, FieldMemOperand(left, String::kLengthOffset));
  __ ldr(scratch2, FieldMemOperand(right, String::kLengthOffset));
  __ sub(scratch3, scratch1, Operand(scratch2), SetCC);
  Register length_delta = scratch3;
  __ mov(scratch1, scratch2, LeaveCC, gt);
  Register min_length = scratch1;
  ASSERT(kSmiTag == 0);
  __ tst(min_length, Operand(min_length));
  __ b(eq, &compare_lengths);

  // Untag smi.
  __ mov(min_length, Operand(min_length, ASR, kSmiTagSize));

  // Set up registers so that we only need to increment one register
  // in the loop.
  __ add(scratch2, min_length,
         Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  __ add(left, left, Operand(scratch2));
  __ add(right, right, Operand(scratch2));
  // Registers left and right point to the min_length character of the
  // strings.
  __ rsb(min_length, min_length, Operand(-1));
  Register index = min_length;
  // Index starts at -min_length.

  {
    // Compare loop.
    Label loop;
    __ bind(&loop);
    // Compare characters.
    __ add(index, index, Operand(1), SetCC);
    __ ldrb(scratch2, MemOperand(left, index), ne);
    __ ldrb(scratch4, MemOperand(right, index), ne);
    // Skip to compare lengths with eq condition true.
    __ b(eq, &compare_lengths);
    __ cmp(scratch2, scratch4);
    __ b(eq, &loop);
    // Fall through with eq condition false.
  }
  // Compare lengths - strings up to min-length are equal.
  __ bind(&compare_lengths);
  ASSERT(Smi::FromInt(EQUAL) == static_cast<Smi*>(0));
  // Use zero length_delta as result.
  __ mov(r0, Operand(length_delta), SetCC, eq);
  // Fall through to here if characters compare not-equal.
  __ mov(r0, Operand(Smi::FromInt(GREATER)), LeaveCC, gt);
  __ mov(r0, Operand(Smi::FromInt(LESS)), LeaveCC, lt);
  __ Ret();
}
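
// Semantics of the flat ascii compare above, in C terms (a sketch; the
// returned smi is one of LESS, EQUAL or GREATER):
//
//   int r = memcmp(left_chars, right_chars, min(left_len, right_len));
//   if (r != 0) return r < 0 ? LESS : GREATER;
//   return left_len == right_len ? EQUAL
//                                : (left_len < right_len ? LESS : GREATER);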


void StringCompareStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  // sp[0]: right string
  // sp[4]: left string
  __ ldr(r0, MemOperand(sp, 1 * kPointerSize));  // left
  __ ldr(r1, MemOperand(sp, 0 * kPointerSize));  // right

  Label not_same;
  __ cmp(r0, r1);
  __ b(ne, &not_same);
  ASSERT_EQ(0, EQUAL);
  ASSERT_EQ(0, kSmiTag);
  __ mov(r0, Operand(Smi::FromInt(EQUAL)));
  __ IncrementCounter(&Counters::string_compare_native, 1, r1, r2);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ Ret();

  __ bind(&not_same);

  // Check that both objects are sequential ascii strings.
  __ JumpIfNotBothSequentialAsciiStrings(r0, r1, r2, r3, &runtime);

  // Compare flat ascii strings natively. Remove arguments from stack first.
  __ IncrementCounter(&Counters::string_compare_native, 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  GenerateCompareFlatAsciiStrings(masm, r0, r1, r2, r3, r4, r5);

  // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
}


void StringAddStub::Generate(MacroAssembler* masm) {
  Label string_add_runtime;
  // Stack on entry:
  // sp[0]: second argument.
  // sp[4]: first argument.

  // Load the two arguments.
  __ ldr(r0, MemOperand(sp, 1 * kPointerSize));  // First argument.
  __ ldr(r1, MemOperand(sp, 0 * kPointerSize));  // Second argument.

  // Make sure that both arguments are strings if not known in advance.
  if (string_check_) {
    ASSERT_EQ(0, kSmiTag);
    __ JumpIfEitherSmi(r0, r1, &string_add_runtime);
    // Load instance types.
    __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset));
    __ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
    __ ldrb(r5, FieldMemOperand(r5, Map::kInstanceTypeOffset));
    ASSERT_EQ(0, kStringTag);
    // If either is not a string, go to runtime.
    __ tst(r4, Operand(kIsNotStringMask));
    __ tst(r5, Operand(kIsNotStringMask), eq);
    __ b(ne, &string_add_runtime);
  }

  // Both arguments are strings.
  // r0: first string
  // r1: second string
  // r4: first string instance type (if string_check_)
  // r5: second string instance type (if string_check_)
  {
    Label strings_not_empty;
    // Check if either of the strings are empty. In that case return the
    // other.
    __ ldr(r2, FieldMemOperand(r0, String::kLengthOffset));
    __ ldr(r3, FieldMemOperand(r1, String::kLengthOffset));
    ASSERT(kSmiTag == 0);
    __ cmp(r2, Operand(Smi::FromInt(0)));  // Test if first string is empty.
    __ mov(r0, Operand(r1), LeaveCC, eq);  // If first is empty, return second.
    ASSERT(kSmiTag == 0);
    // Else test if second string is empty.
    __ cmp(r3, Operand(Smi::FromInt(0)), ne);
    __ b(ne, &strings_not_empty);  // If either string was empty, return r0.

    __ IncrementCounter(&Counters::string_add_native, 1, r2, r3);
    __ add(sp, sp, Operand(2 * kPointerSize));
    __ Ret();

    __ bind(&strings_not_empty);
  }

  __ mov(r2, Operand(r2, ASR, kSmiTagSize));
  __ mov(r3, Operand(r3, ASR, kSmiTagSize));
  // Both strings are non-empty.
  // r0: first string
  // r1: second string
  // r2: length of first string
  // r3: length of second string
  // r4: first string instance type (if string_check_)
  // r5: second string instance type (if string_check_)
  // Look at the length of the result of adding the two strings.
  Label string_add_flat_result, longer_than_two;
  // Adding two lengths can't overflow.
  ASSERT(String::kMaxLength * 2 > String::kMaxLength);
  __ add(r6, r2, Operand(r3));
  // Use the runtime system when adding two one character strings, as it
  // contains optimizations for this specific case using the symbol table.
  __ cmp(r6, Operand(2));
  __ b(ne, &longer_than_two);

  // Check that both strings are non-external ascii strings.
  if (!string_check_) {
    __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset));
    __ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
    __ ldrb(r5, FieldMemOperand(r5, Map::kInstanceTypeOffset));
  }
  __ JumpIfBothInstanceTypesAreNotSequentialAscii(r4, r5, r6, r7,
                                                  &string_add_runtime);

  // Get the two characters forming the sub string.
  __ ldrb(r2, FieldMemOperand(r0, SeqAsciiString::kHeaderSize));
  __ ldrb(r3, FieldMemOperand(r1, SeqAsciiString::kHeaderSize));

  // Try to look up the two character string in the symbol table. If it is
  // not found just allocate a new one.
  Label make_two_character_string;
  StringHelper::GenerateTwoCharacterSymbolTableProbe(
      masm, r2, r3, r6, r7, r4, r5, r9, &make_two_character_string);
  __ IncrementCounter(&Counters::string_add_native, 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ Ret();

  __ bind(&make_two_character_string);
  // The resulting string has length 2 and the first chars of the two strings
  // are combined into a single halfword in the r2 register.
  // So we can fill the resulting string without two loops by a single
  // halfword store instruction (which assumes that the processor is in
  // little endian mode).
  __ mov(r6, Operand(2));
  __ AllocateAsciiString(r0, r6, r4, r5, r9, &string_add_runtime);
  __ strh(r2, FieldMemOperand(r0, SeqAsciiString::kHeaderSize));
  __ IncrementCounter(&Counters::string_add_native, 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ Ret();

  __ bind(&longer_than_two);
  // Check if resulting string will be flat.
  __ cmp(r6, Operand(String::kMinNonFlatLength));
  __ b(lt, &string_add_flat_result);
  // Handle exceptionally long strings in the runtime system.
  ASSERT((String::kMaxLength & 0x80000000) == 0);
  ASSERT(IsPowerOf2(String::kMaxLength + 1));
  // kMaxLength + 1 is representable as a shifted literal, kMaxLength is not.
  __ cmp(r6, Operand(String::kMaxLength + 1));
  __ b(hs, &string_add_runtime);

  // If result is not supposed to be flat, allocate a cons string object.
  // If both strings are ascii the result is an ascii cons string.
  if (!string_check_) {
    __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset));
    __ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
    __ ldrb(r5, FieldMemOperand(r5, Map::kInstanceTypeOffset));
  }
  Label non_ascii, allocated, ascii_data;
  ASSERT_EQ(0, kTwoByteStringTag);
  __ tst(r4, Operand(kStringEncodingMask));
  __ tst(r5, Operand(kStringEncodingMask), ne);
  __ b(eq, &non_ascii);

  // Allocate an ASCII cons string.
  __ bind(&ascii_data);
  __ AllocateAsciiConsString(r7, r6, r4, r5, &string_add_runtime);
  __ bind(&allocated);
  // Fill the fields of the cons string.
  __ str(r0, FieldMemOperand(r7, ConsString::kFirstOffset));
  __ str(r1, FieldMemOperand(r7, ConsString::kSecondOffset));
  __ mov(r0, Operand(r7));
  __ IncrementCounter(&Counters::string_add_native, 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ Ret();

  __ bind(&non_ascii);
  // At least one of the strings is two-byte. Check whether it happens
  // to contain only ascii characters.
  // r4: first instance type.
  // r5: second instance type.
  __ tst(r4, Operand(kAsciiDataHintMask));
  __ tst(r5, Operand(kAsciiDataHintMask), ne);
  __ b(ne, &ascii_data);
  __ eor(r4, r4, Operand(r5));
  ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0);
  __ and_(r4, r4, Operand(kAsciiStringTag | kAsciiDataHintTag));
  __ cmp(r4, Operand(kAsciiStringTag | kAsciiDataHintTag));
  __ b(eq, &ascii_data);

  // Allocate a two byte cons string.
  __ AllocateTwoByteConsString(r7, r6, r4, r5, &string_add_runtime);
  __ jmp(&allocated);
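
  // Encoding choice for the cons string above (a sketch of the checks):
  //   both strings ascii                        -> ascii cons string
  //   both two-byte, both hinted ascii data     -> ascii cons string
  //   one ascii, the other hinted ascii data    -> ascii cons string
  //   otherwise                                 -> two byte cons string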

  // Handle creating a flat result. First check that both strings are
  // sequential and that they have the same encoding.
  // r0: first string
  // r1: second string
  // r2: length of first string
  // r3: length of second string
  // r4: first string instance type (if string_check_)
  // r5: second string instance type (if string_check_)
  // r6: sum of lengths.
  __ bind(&string_add_flat_result);
  if (!string_check_) {
    __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset));
    __ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
    __ ldrb(r5, FieldMemOperand(r5, Map::kInstanceTypeOffset));
  }
  // Check that both strings are sequential.
  ASSERT_EQ(0, kSeqStringTag);
  __ tst(r4, Operand(kStringRepresentationMask));
  __ tst(r5, Operand(kStringRepresentationMask), eq);
  __ b(ne, &string_add_runtime);
  // Now check if both strings have the same encoding (ASCII/Two-byte).
  // r0: first string.
  // r1: second string.
  // r2: length of first string.
  // r3: length of second string.
  // r6: sum of lengths.
  Label non_ascii_string_add_flat_result;
  ASSERT(IsPowerOf2(kStringEncodingMask));  // Just one bit to test.
  __ eor(r7, r4, Operand(r5));
  __ tst(r7, Operand(kStringEncodingMask));
  __ b(ne, &string_add_runtime);
  // And see if it's ASCII or two-byte.
  __ tst(r4, Operand(kStringEncodingMask));
  __ b(eq, &non_ascii_string_add_flat_result);

  // Both strings are sequential ASCII strings. We also know that they are
  // short (since the sum of the lengths is less than kMinNonFlatLength).
  // r6: length of resulting flat string
  __ AllocateAsciiString(r7, r6, r4, r5, r9, &string_add_runtime);
  // Locate first character of result.
  __ add(r6, r7, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // Locate first character of first argument.
  __ add(r0, r0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // r0: first character of first string.
  // r1: second string.
  // r2: length of first string.
  // r3: length of second string.
  // r6: first character of result.
  // r7: result string.
  StringHelper::GenerateCopyCharacters(masm, r6, r0, r2, r4, true);

  // Load second argument and locate first character.
  __ add(r1, r1, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // r1: first character of second string.
  // r3: length of second string.
  // r6: next character of result.
  // r7: result string.
  StringHelper::GenerateCopyCharacters(masm, r6, r1, r3, r4, true);
  __ mov(r0, Operand(r7));
  __ IncrementCounter(&Counters::string_add_native, 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ Ret();

  __ bind(&non_ascii_string_add_flat_result);
  // Both strings are sequential two byte strings.
  // r0: first string.
  // r1: second string.
  // r2: length of first string.
  // r3: length of second string.
  // r6: sum of lengths of strings.
  __ AllocateTwoByteString(r7, r6, r4, r5, r9, &string_add_runtime);
  // r0: first string.
  // r1: second string.
  // r2: length of first string.
  // r3: length of second string.
  // r7: result string.

  // Locate first character of result.
  __ add(r6, r7, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // Locate first character of first argument.
  __ add(r0, r0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));

  // r0: first character of first string.
  // r1: second string.
  // r2: length of first string.
  // r3: length of second string.
  // r6: first character of result.
  // r7: result string.
  StringHelper::GenerateCopyCharacters(masm, r6, r0, r2, r4, false);

  // Locate first character of second argument.
  __ add(r1, r1, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));

  // r1: first character of second string.
  // r3: length of second string.
  // r6: next character of result (after copy of first string).
  // r7: result string.
  StringHelper::GenerateCopyCharacters(masm, r6, r1, r3, r4, false);

  __ mov(r0, Operand(r7));
  __ IncrementCounter(&Counters::string_add_native, 1, r2, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ Ret();

  // Just jump to runtime to add the two strings.
  __ bind(&string_add_runtime);
  __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM