// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_MIPS)

#include "bootstrapper.h"
#include "code-stubs.h"
#include "codegen.h"
#include "regexp-macro-assembler.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {


void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { a1, a0 };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
}


#define __ ACCESS_MASM(masm)

static void EmitIdenticalObjectComparison(MacroAssembler* masm,
                                          Label* slow,
                                          Condition cc);
static void EmitSmiNonsmiComparison(MacroAssembler* masm,
                                    Register lhs,
                                    Register rhs,
                                    Label* rhs_not_nan,
                                    Label* slow,
                                    bool strict);
static void EmitTwoNonNanDoubleComparison(MacroAssembler* masm, Condition cc);
static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
                                           Register lhs,
                                           Register rhs);


// Check if the operand is a heap number.
static void EmitCheckForHeapNumber(MacroAssembler* masm, Register operand,
                                   Register scratch1, Register scratch2,
                                   Label* not_a_heap_number) {
  __ lw(scratch1, FieldMemOperand(operand, HeapObject::kMapOffset));
  __ LoadRoot(scratch2, Heap::kHeapNumberMapRootIndex);
  __ Branch(not_a_heap_number, ne, scratch1, Operand(scratch2));
}


void ToNumberStub::Generate(MacroAssembler* masm) {
  // The ToNumber stub takes one argument in a0.
  Label check_heap_number, call_builtin;
  __ JumpIfNotSmi(a0, &check_heap_number);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a0);

  __ bind(&check_heap_number);
  EmitCheckForHeapNumber(masm, a0, a1, t0, &call_builtin);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a0);

  __ bind(&call_builtin);
  __ push(a0);
  __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION);
}


void FastNewClosureStub::Generate(MacroAssembler* masm) {
  // Create a new closure from the given function info in new
  // space. Set the context to the current context in cp.
  Counters* counters = masm->isolate()->counters();

  Label gc;

  // Pop the function info from the stack.
  __ pop(a3);

  // Attempt to allocate new JSFunction in new space.
  __ AllocateInNewSpace(JSFunction::kSize,
                        v0,
                        a1,
                        a2,
                        &gc,
                        TAG_OBJECT);

  __ IncrementCounter(counters->fast_new_closure_total(), 1, t2, t3);

  int map_index = (language_mode_ == CLASSIC_MODE)
      ? Context::FUNCTION_MAP_INDEX
      : Context::STRICT_MODE_FUNCTION_MAP_INDEX;

  // Compute the function map in the current native context and set that
  // as the map of the allocated object.
  __ lw(a2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ lw(a2, FieldMemOperand(a2, GlobalObject::kNativeContextOffset));
  __ lw(t1, MemOperand(a2, Context::SlotOffset(map_index)));
  __ sw(t1, FieldMemOperand(v0, HeapObject::kMapOffset));

  // Initialize the rest of the function. We don't have to update the
  // write barrier because the allocated object is in new space.
  __ LoadRoot(a1, Heap::kEmptyFixedArrayRootIndex);
  __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
  __ sw(a1, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ sw(a1, FieldMemOperand(v0, JSObject::kElementsOffset));
  __ sw(t1, FieldMemOperand(v0, JSFunction::kPrototypeOrInitialMapOffset));
  __ sw(a3, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
  __ sw(cp, FieldMemOperand(v0, JSFunction::kContextOffset));
  __ sw(a1, FieldMemOperand(v0, JSFunction::kLiteralsOffset));

  // Initialize the code pointer in the function to be the one
  // found in the shared function info object.
  // But first check if there is an optimized version for our context.
  Label check_optimized;
  Label install_unoptimized;
  if (FLAG_cache_optimized_code) {
    __ lw(a1,
          FieldMemOperand(a3, SharedFunctionInfo::kOptimizedCodeMapOffset));
    __ And(at, a1, a1);
    __ Branch(&check_optimized, ne, at, Operand(zero_reg));
  }
  __ bind(&install_unoptimized);
  __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
  __ sw(t0, FieldMemOperand(v0, JSFunction::kNextFunctionLinkOffset));
  __ lw(a3, FieldMemOperand(a3, SharedFunctionInfo::kCodeOffset));
  __ Addu(a3, a3, Operand(Code::kHeaderSize - kHeapObjectTag));

  // Return result. The argument function info has been popped already.
  __ sw(a3, FieldMemOperand(v0, JSFunction::kCodeEntryOffset));
  __ Ret();

  __ bind(&check_optimized);

  __ IncrementCounter(counters->fast_new_closure_try_optimized(), 1, t2, t3);

  // a2 holds native context, a1 points to fixed array of 3-element entries
  // (native context, optimized code, literals).
  // The optimized code map must never be empty, so check the first elements.
  Label install_optimized;
  // Speculatively move code object into t0.
  __ lw(t0, FieldMemOperand(a1, FixedArray::kHeaderSize + kPointerSize));
  __ lw(t1, FieldMemOperand(a1, FixedArray::kHeaderSize));
  __ Branch(&install_optimized, eq, a2, Operand(t1));

  // Iterate through the rest of map backwards. t0 holds an index as a Smi.
  Label loop;
  __ lw(t0, FieldMemOperand(a1, FixedArray::kLengthOffset));
  __ bind(&loop);
  // Do not double check first entry.

  __ Branch(&install_unoptimized, eq, t0,
            Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
  __ Subu(t0, t0, Operand(
      Smi::FromInt(SharedFunctionInfo::kEntryLength)));  // Skip an entry.
  __ Addu(t1, a1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(at, t0, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(t1, t1, Operand(at));
  __ lw(t1, MemOperand(t1));
  __ Branch(&loop, ne, a2, Operand(t1));
  // Hit: fetch the optimized code.
  __ Addu(t1, a1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(at, t0, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(t1, t1, Operand(at));
  __ Addu(t1, t1, Operand(kPointerSize));
  __ lw(t0, MemOperand(t1));

  __ bind(&install_optimized);
  __ IncrementCounter(counters->fast_new_closure_install_optimized(),
                      1, t2, t3);

  // TODO(fschneider): Idea: store proper code pointers in the map and either
  // unmangle them on marking or do nothing as the whole map is discarded on
  // major GC anyway.
  __ Addu(t0, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ sw(t0, FieldMemOperand(v0, JSFunction::kCodeEntryOffset));

  // Now link a function into a list of optimized functions.
  __ lw(t0, ContextOperand(a2, Context::OPTIMIZED_FUNCTIONS_LIST));

  __ sw(t0, FieldMemOperand(v0, JSFunction::kNextFunctionLinkOffset));
  // No need for write barrier as JSFunction (v0) is in the new space.

  __ sw(v0, ContextOperand(a2, Context::OPTIMIZED_FUNCTIONS_LIST));
  // Store JSFunction (v0) into t0 before issuing the write barrier, as
  // it clobbers all the registers passed.
  __ mov(t0, v0);
  __ RecordWriteContextSlot(
      a2,
      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST),
      t0,
      a1,
      kRAHasNotBeenSaved,
      kDontSaveFPRegs);

  // Return result. The argument function info has been popped already.
  __ Ret();

  // Create a new closure through the slower runtime call.
  __ bind(&gc);
  __ LoadRoot(t0, Heap::kFalseValueRootIndex);
  __ Push(cp, a3, t0);
  __ TailCallRuntime(Runtime::kNewClosure, 3, 1);
}


void FastNewContextStub::Generate(MacroAssembler* masm) {
  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;

  // Attempt to allocate the context in new space.
  __ AllocateInNewSpace(FixedArray::SizeFor(length),
                        v0,
                        a1,
                        a2,
                        &gc,
                        TAG_OBJECT);

  // Load the function from the stack.
  __ lw(a3, MemOperand(sp, 0));

  // Set up the object header.
  __ LoadRoot(a1, Heap::kFunctionContextMapRootIndex);
  __ li(a2, Operand(Smi::FromInt(length)));
  __ sw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset));
  __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));

  // Set up the fixed slots, copy the global object from the previous context.
  __ lw(a2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ li(a1, Operand(Smi::FromInt(0)));
  __ sw(a3, MemOperand(v0, Context::SlotOffset(Context::CLOSURE_INDEX)));
  __ sw(cp, MemOperand(v0, Context::SlotOffset(Context::PREVIOUS_INDEX)));
  __ sw(a1, MemOperand(v0, Context::SlotOffset(Context::EXTENSION_INDEX)));
  __ sw(a2, MemOperand(v0, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));

  // Initialize the rest of the slots to undefined.
  __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
    __ sw(a1, MemOperand(v0, Context::SlotOffset(i)));
  }

  // Remove the on-stack argument and return.
  __ mov(cp, v0);
  __ DropAndRet(1);

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1);
}


void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [sp]: function.
  // [sp + kPointerSize]: serialized scope info

  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
  __ AllocateInNewSpace(FixedArray::SizeFor(length),
                        v0, a1, a2, &gc, TAG_OBJECT);

  // Load the function from the stack.
  __ lw(a3, MemOperand(sp, 0));

  // Load the serialized scope info from the stack.
  __ lw(a1, MemOperand(sp, 1 * kPointerSize));

  // Set up the object header.
  __ LoadRoot(a2, Heap::kBlockContextMapRootIndex);
  __ sw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ li(a2, Operand(Smi::FromInt(length)));
  __ sw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset));

  // If this block context is nested in the native context we get a smi
  // sentinel instead of a function. The block context should get the
  // canonical empty function of the native context as its closure which
  // we still have to look up.
  Label after_sentinel;
  __ JumpIfNotSmi(a3, &after_sentinel);
  if (FLAG_debug_code) {
    const char* message = "Expected 0 as a Smi sentinel";
    __ Assert(eq, message, a3, Operand(zero_reg));
  }
  __ lw(a3, GlobalObjectOperand());
  __ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
  __ lw(a3, ContextOperand(a3, Context::CLOSURE_INDEX));
  __ bind(&after_sentinel);

  // Set up the fixed slots, copy the global object from the previous context.
  __ lw(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ sw(a3, ContextOperand(v0, Context::CLOSURE_INDEX));
  __ sw(cp, ContextOperand(v0, Context::PREVIOUS_INDEX));
  __ sw(a1, ContextOperand(v0, Context::EXTENSION_INDEX));
  __ sw(a2, ContextOperand(v0, Context::GLOBAL_OBJECT_INDEX));

  // Initialize the rest of the slots to the hole value.
  __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
  for (int i = 0; i < slots_; i++) {
    __ sw(a1, ContextOperand(v0, i + Context::MIN_CONTEXT_SLOTS));
  }

  // Remove the on-stack argument and return.
  __ mov(cp, v0);
  __ DropAndRet(2);

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
}


static void GenerateFastCloneShallowArrayCommon(
    MacroAssembler* masm,
    int length,
    FastCloneShallowArrayStub::Mode mode,
    AllocationSiteMode allocation_site_mode,
    Label* fail) {
  // Registers on entry:
  // a3: boilerplate literal array.
  ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS);

  // All sizes here are multiples of kPointerSize.
  int elements_size = 0;
  if (length > 0) {
    elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
        ? FixedDoubleArray::SizeFor(length)
        : FixedArray::SizeFor(length);
  }

  int size = JSArray::kSize;
  int allocation_info_start = size;
  if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
    size += AllocationSiteInfo::kSize;
  }
  size += elements_size;

  // Allocate both the JS array and the elements array in one big
  // allocation. This avoids multiple limit checks.
  __ AllocateInNewSpace(size,
                        v0,
                        a1,
                        a2,
                        fail,
                        TAG_OBJECT);

  if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
    __ li(a2, Operand(Handle<Map>(masm->isolate()->heap()->
        allocation_site_info_map())));
    __ sw(a2, FieldMemOperand(v0, allocation_info_start));
    __ sw(a3, FieldMemOperand(v0, allocation_info_start + kPointerSize));
  }

  // Copy the JS array part.
  for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
    if ((i != JSArray::kElementsOffset) || (length == 0)) {
      __ lw(a1, FieldMemOperand(a3, i));
      __ sw(a1, FieldMemOperand(v0, i));
    }
  }

  if (length > 0) {
    // Get hold of the elements array of the boilerplate and set up the
    // elements pointer in the resulting object.
    __ lw(a3, FieldMemOperand(a3, JSArray::kElementsOffset));
    if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
      __ Addu(a2, v0, Operand(JSArray::kSize + AllocationSiteInfo::kSize));
    } else {
      __ Addu(a2, v0, Operand(JSArray::kSize));
    }
    __ sw(a2, FieldMemOperand(v0, JSArray::kElementsOffset));

    // Copy the elements array.
    ASSERT((elements_size % kPointerSize) == 0);
    __ CopyFields(a2, a3, a1.bit(), elements_size / kPointerSize);
  }
}

void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [sp]: constant elements.
  // [sp + kPointerSize]: literal index.
  // [sp + (2 * kPointerSize)]: literals array.

  // Load the boilerplate object into a3 and check if we need to create a
  // boilerplate.
  Label slow_case;
  __ lw(a3, MemOperand(sp, 2 * kPointerSize));
  __ lw(a0, MemOperand(sp, 1 * kPointerSize));
  __ Addu(a3, a3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(t0, a3, t0);
  __ lw(a3, MemOperand(t0));
  __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
  __ Branch(&slow_case, eq, a3, Operand(t1));

  FastCloneShallowArrayStub::Mode mode = mode_;
  if (mode == CLONE_ANY_ELEMENTS) {
    Label double_elements, check_fast_elements;
    __ lw(v0, FieldMemOperand(a3, JSArray::kElementsOffset));
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ LoadRoot(t1, Heap::kFixedCOWArrayMapRootIndex);
    __ Branch(&check_fast_elements, ne, v0, Operand(t1));
    GenerateFastCloneShallowArrayCommon(masm, 0, COPY_ON_WRITE_ELEMENTS,
                                        allocation_site_mode_,
                                        &slow_case);
    // Return and remove the on-stack parameters.
    __ DropAndRet(3);

    __ bind(&check_fast_elements);
    __ LoadRoot(t1, Heap::kFixedArrayMapRootIndex);
    __ Branch(&double_elements, ne, v0, Operand(t1));
    GenerateFastCloneShallowArrayCommon(masm, length_, CLONE_ELEMENTS,
                                        allocation_site_mode_,
                                        &slow_case);
    // Return and remove the on-stack parameters.
    __ DropAndRet(3);

    __ bind(&double_elements);
    mode = CLONE_DOUBLE_ELEMENTS;
    // Fall through to generate the code to handle double elements.
  }

  if (FLAG_debug_code) {
    const char* message;
    Heap::RootListIndex expected_map_index;
    if (mode == CLONE_ELEMENTS) {
      message = "Expected (writable) fixed array";
      expected_map_index = Heap::kFixedArrayMapRootIndex;
    } else if (mode == CLONE_DOUBLE_ELEMENTS) {
      message = "Expected (writable) fixed double array";
      expected_map_index = Heap::kFixedDoubleArrayMapRootIndex;
    } else {
      ASSERT(mode == COPY_ON_WRITE_ELEMENTS);
      message = "Expected copy-on-write fixed array";
      expected_map_index = Heap::kFixedCOWArrayMapRootIndex;
    }
    __ push(a3);
    __ lw(a3, FieldMemOperand(a3, JSArray::kElementsOffset));
    __ lw(a3, FieldMemOperand(a3, HeapObject::kMapOffset));
    __ LoadRoot(at, expected_map_index);
    __ Assert(eq, message, a3, Operand(at));
    __ pop(a3);
  }

  GenerateFastCloneShallowArrayCommon(masm, length_, mode,
                                      allocation_site_mode_,
                                      &slow_case);

  // Return and remove the on-stack parameters.
  __ DropAndRet(3);

  __ bind(&slow_case);
  __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
}


void FastCloneShallowObjectStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [sp]: object literal flags.
  // [sp + kPointerSize]: constant properties.
  // [sp + (2 * kPointerSize)]: literal index.
  // [sp + (3 * kPointerSize)]: literals array.

  // Load boilerplate object into a3 and check if we need to create a
  // boilerplate.
  Label slow_case;
  __ lw(a3, MemOperand(sp, 3 * kPointerSize));
  __ lw(a0, MemOperand(sp, 2 * kPointerSize));
  __ Addu(a3, a3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(a3, t0, a3);
  __ lw(a3, MemOperand(a3));
  __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
  __ Branch(&slow_case, eq, a3, Operand(t0));

  // Check that the boilerplate contains only fast properties and we can
  // statically determine the instance size.
  int size = JSObject::kHeaderSize + length_ * kPointerSize;
  __ lw(a0, FieldMemOperand(a3, HeapObject::kMapOffset));
  __ lbu(a0, FieldMemOperand(a0, Map::kInstanceSizeOffset));
  __ Branch(&slow_case, ne, a0, Operand(size >> kPointerSizeLog2));

  // Allocate the JS object and copy header together with all in-object
  // properties from the boilerplate.
  __ AllocateInNewSpace(size, v0, a1, a2, &slow_case, TAG_OBJECT);
  for (int i = 0; i < size; i += kPointerSize) {
    __ lw(a1, FieldMemOperand(a3, i));
    __ sw(a1, FieldMemOperand(v0, i));
  }

  // Return and remove the on-stack parameters.
  __ DropAndRet(4);

  __ bind(&slow_case);
  __ TailCallRuntime(Runtime::kCreateObjectLiteralShallow, 4, 1);
}


// Takes a Smi and converts to an IEEE 64 bit floating point value in two
// registers. The format is 1 sign bit, 11 exponent bits (biased 1023) and
// 52 fraction bits (20 in the first word, 32 in the second). Zeros is a
// scratch register. Destroys the source register. No GC occurs during this
// stub so you don't have to set up the frame.
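// For example, the Smi holding 3 is converted to the exponent word 0x40080000
// and the mantissa word 0x00000000, i.e. 3.0 = 1.5 * 2^1 with a biased
// exponent of 1024 (0x400).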
class ConvertToDoubleStub : public PlatformCodeStub {
 public:
  ConvertToDoubleStub(Register result_reg_1,
                      Register result_reg_2,
                      Register source_reg,
                      Register scratch_reg)
      : result1_(result_reg_1),
        result2_(result_reg_2),
        source_(source_reg),
        zeros_(scratch_reg) { }

 private:
  Register result1_;
  Register result2_;
  Register source_;
  Register zeros_;

  // Minor key encoding in 16 bits.
  class ModeBits: public BitField<OverwriteMode, 0, 2> {};
  class OpBits: public BitField<Token::Value, 2, 14> {};

  Major MajorKey() { return ConvertToDouble; }
  int MinorKey() {
    // Encode the parameters in a unique 16 bit value.
    return result1_.code() +
           (result2_.code() << 4) +
           (source_.code() << 8) +
           (zeros_.code() << 12);
  }

  void Generate(MacroAssembler* masm);
};


void ConvertToDoubleStub::Generate(MacroAssembler* masm) {
#ifndef BIG_ENDIAN_FLOATING_POINT
  Register exponent = result1_;
  Register mantissa = result2_;
#else
  Register exponent = result2_;
  Register mantissa = result1_;
#endif
  Label not_special;
  // Convert from Smi to integer.
  __ sra(source_, source_, kSmiTagSize);
  // Move sign bit from source to destination. This works because the sign bit
  // in the exponent word of the double has the same position and polarity as
  // the 2's complement sign bit in a Smi.
  STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u);
  __ And(exponent, source_, Operand(HeapNumber::kSignMask));
  // Subtract from 0 if source was negative.
  __ subu(at, zero_reg, source_);
  __ Movn(source_, at, exponent);

  // We have -1, 0 or 1, which we treat specially. Register source_ contains
  // absolute value: it is either equal to 1 (special case of -1 and 1),
  // greater than 1 (not a special case) or less than 1 (special case of 0).
  __ Branch(&not_special, gt, source_, Operand(1));

  // For 1 or -1 we need to or in the 0 exponent (biased to 1023).
  const uint32_t exponent_word_for_1 =
      HeapNumber::kExponentBias << HeapNumber::kExponentShift;
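  // With kExponentBias == 1023 and kExponentShift == 20 this constant is
  // 0x3FF00000, the upper word of the IEEE double 1.0.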
  // Safe to use 'at' as dest reg here.
  __ Or(at, exponent, Operand(exponent_word_for_1));
  __ Movn(exponent, at, source_);  // Write exp when source not 0.
  // 1, 0 and -1 all have 0 for the second word.
  __ Ret(USE_DELAY_SLOT);
  __ mov(mantissa, zero_reg);

  __ bind(&not_special);
  // Count leading zeros.
  // Gets the wrong answer for 0, but we already checked for that case above.
  __ Clz(zeros_, source_);
  // Compute exponent and or it into the exponent register.
  // We use mantissa as a scratch register here.
  __ li(mantissa, Operand(31 + HeapNumber::kExponentBias));
  __ subu(mantissa, mantissa, zeros_);
  __ sll(mantissa, mantissa, HeapNumber::kExponentShift);
  __ Or(exponent, exponent, mantissa);

  // Shift up the source chopping the top bit off.
  __ Addu(zeros_, zeros_, Operand(1));
  // This wouldn't work for 1.0 or -1.0 as the shift would be 32 which means 0.
  __ sllv(source_, source_, zeros_);
  // Compute lower part of fraction (last 12 bits).
  __ sll(mantissa, source_, HeapNumber::kMantissaBitsInTopWord);
  // And the top (top 20 bits).
  __ srl(source_, source_, 32 - HeapNumber::kMantissaBitsInTopWord);

  __ Ret(USE_DELAY_SLOT);
  __ or_(exponent, exponent, source_);
}


lrn@chromium.org7516f052011-03-30 08:52:27 +0000631void FloatingPointHelper::LoadSmis(MacroAssembler* masm,
632 FloatingPointHelper::Destination destination,
633 Register scratch1,
634 Register scratch2) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +0000635 if (CpuFeatures::IsSupported(FPU)) {
636 CpuFeatures::Scope scope(FPU);
637 __ sra(scratch1, a0, kSmiTagSize);
638 __ mtc1(scratch1, f14);
639 __ cvt_d_w(f14, f14);
640 __ sra(scratch1, a1, kSmiTagSize);
641 __ mtc1(scratch1, f12);
642 __ cvt_d_w(f12, f12);
643 if (destination == kCoreRegisters) {
danno@chromium.org40cb8782011-05-25 07:58:50 +0000644 __ Move(a2, a3, f14);
645 __ Move(a0, a1, f12);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +0000646 }
647 } else {
648 ASSERT(destination == kCoreRegisters);
649 // Write Smi from a0 to a3 and a2 in double format.
650 __ mov(scratch1, a0);
651 ConvertToDoubleStub stub1(a3, a2, scratch1, scratch2);
652 __ push(ra);
sgjesse@chromium.org6db88712011-07-11 11:41:22 +0000653 __ Call(stub1.GetCode());
vegorov@chromium.org7304bca2011-05-16 12:14:13 +0000654 // Write Smi from a1 to a1 and a0 in double format.
655 __ mov(scratch1, a1);
656 ConvertToDoubleStub stub2(a1, a0, scratch1, scratch2);
sgjesse@chromium.org6db88712011-07-11 11:41:22 +0000657 __ Call(stub2.GetCode());
vegorov@chromium.org7304bca2011-05-16 12:14:13 +0000658 __ pop(ra);
659 }
lrn@chromium.org7516f052011-03-30 08:52:27 +0000660}
661
662
lrn@chromium.org7516f052011-03-30 08:52:27 +0000663void FloatingPointHelper::LoadNumber(MacroAssembler* masm,
664 Destination destination,
665 Register object,
666 FPURegister dst,
667 Register dst1,
668 Register dst2,
669 Register heap_number_map,
670 Register scratch1,
671 Register scratch2,
672 Label* not_number) {
svenpanne@chromium.orgc859c4f2012-10-15 11:51:39 +0000673 __ AssertRootValue(heap_number_map,
674 Heap::kHeapNumberMapRootIndex,
675 "HeapNumberMap register clobbered.");
vegorov@chromium.org7304bca2011-05-16 12:14:13 +0000676
677 Label is_smi, done;
678
danno@chromium.orgfa458e42012-02-01 10:48:36 +0000679 // Smi-check
680 __ UntagAndJumpIfSmi(scratch1, object, &is_smi);
681 // Heap number check
vegorov@chromium.org7304bca2011-05-16 12:14:13 +0000682 __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_number);
683
684 // Handle loading a double from a heap number.
685 if (CpuFeatures::IsSupported(FPU) &&
686 destination == kFPURegisters) {
687 CpuFeatures::Scope scope(FPU);
688 // Load the double from tagged HeapNumber to double register.
689
690 // ARM uses a workaround here because of the unaligned HeapNumber
691 // kValueOffset. On MIPS this workaround is built into ldc1 so there's no
692 // point in generating even more instructions.
693 __ ldc1(dst, FieldMemOperand(object, HeapNumber::kValueOffset));
694 } else {
695 ASSERT(destination == kCoreRegisters);
696 // Load the double from heap number to dst1 and dst2 in double format.
697 __ lw(dst1, FieldMemOperand(object, HeapNumber::kValueOffset));
698 __ lw(dst2, FieldMemOperand(object,
699 HeapNumber::kValueOffset + kPointerSize));
700 }
701 __ Branch(&done);
702
703 // Handle loading a double from a smi.
704 __ bind(&is_smi);
705 if (CpuFeatures::IsSupported(FPU)) {
706 CpuFeatures::Scope scope(FPU);
707 // Convert smi to double using FPU instructions.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +0000708 __ mtc1(scratch1, dst);
709 __ cvt_d_w(dst, dst);
710 if (destination == kCoreRegisters) {
711 // Load the converted smi to dst1 and dst2 in double format.
danno@chromium.org40cb8782011-05-25 07:58:50 +0000712 __ Move(dst1, dst2, dst);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +0000713 }
714 } else {
715 ASSERT(destination == kCoreRegisters);
716 // Write smi to dst1 and dst2 double format.
717 __ mov(scratch1, object);
718 ConvertToDoubleStub stub(dst2, dst1, scratch1, scratch2);
719 __ push(ra);
sgjesse@chromium.org6db88712011-07-11 11:41:22 +0000720 __ Call(stub.GetCode());
vegorov@chromium.org7304bca2011-05-16 12:14:13 +0000721 __ pop(ra);
722 }
723
724 __ bind(&done);
lrn@chromium.org7516f052011-03-30 08:52:27 +0000725}
726
727
void FloatingPointHelper::ConvertNumberToInt32(MacroAssembler* masm,
                                               Register object,
                                               Register dst,
                                               Register heap_number_map,
                                               Register scratch1,
                                               Register scratch2,
                                               Register scratch3,
                                               FPURegister double_scratch,
                                               Label* not_number) {
  __ AssertRootValue(heap_number_map,
                     Heap::kHeapNumberMapRootIndex,
                     "HeapNumberMap register clobbered.");
  Label done;
  Label not_in_int32_range;

  __ UntagAndJumpIfSmi(dst, object, &done);
  __ lw(scratch1, FieldMemOperand(object, HeapNumber::kMapOffset));
  __ Branch(not_number, ne, scratch1, Operand(heap_number_map));
  __ ConvertToInt32(object,
                    dst,
                    scratch1,
                    scratch2,
                    double_scratch,
                    &not_in_int32_range);
  __ jmp(&done);

  __ bind(&not_in_int32_range);
  __ lw(scratch1, FieldMemOperand(object, HeapNumber::kExponentOffset));
  __ lw(scratch2, FieldMemOperand(object, HeapNumber::kMantissaOffset));

  __ EmitOutOfInt32RangeTruncate(dst,
                                 scratch1,
                                 scratch2,
                                 scratch3);

  __ bind(&done);
}


void FloatingPointHelper::ConvertIntToDouble(MacroAssembler* masm,
                                             Register int_scratch,
                                             Destination destination,
                                             FPURegister double_dst,
                                             Register dst_mantissa,
                                             Register dst_exponent,
                                             Register scratch2,
                                             FPURegister single_scratch) {
  ASSERT(!int_scratch.is(scratch2));
  ASSERT(!int_scratch.is(dst_mantissa));
  ASSERT(!int_scratch.is(dst_exponent));

  Label done;

  if (CpuFeatures::IsSupported(FPU)) {
    CpuFeatures::Scope scope(FPU);
    __ mtc1(int_scratch, single_scratch);
    __ cvt_d_w(double_dst, single_scratch);
    if (destination == kCoreRegisters) {
      __ Move(dst_mantissa, dst_exponent, double_dst);
    }
  } else {
    Label fewer_than_20_useful_bits;
    // Expected output:
    // |       dst_exponent      |       dst_mantissa      |
    // | s |   exp   |                mantissa             |

    // Check for zero.
    __ mov(dst_exponent, int_scratch);
    __ mov(dst_mantissa, int_scratch);
    __ Branch(&done, eq, int_scratch, Operand(zero_reg));

    // Preload the sign of the value.
    __ And(dst_exponent, int_scratch, Operand(HeapNumber::kSignMask));
    // Get the absolute value of the object (as an unsigned integer).
    Label skip_sub;
    __ Branch(&skip_sub, ge, dst_exponent, Operand(zero_reg));
    __ Subu(int_scratch, zero_reg, int_scratch);
    __ bind(&skip_sub);

    // Get mantissa[51:20].

    // Get the position of the first set bit.
    __ Clz(dst_mantissa, int_scratch);
    __ li(scratch2, 31);
    __ Subu(dst_mantissa, scratch2, dst_mantissa);

    // Set the exponent.
    __ Addu(scratch2, dst_mantissa, Operand(HeapNumber::kExponentBias));
    __ Ins(dst_exponent, scratch2,
           HeapNumber::kExponentShift, HeapNumber::kExponentBits);

    // Clear the first non null bit.
    __ li(scratch2, Operand(1));
    __ sllv(scratch2, scratch2, dst_mantissa);
    __ li(at, -1);
    __ Xor(scratch2, scratch2, at);
    __ And(int_scratch, int_scratch, scratch2);

    // Get the number of bits to set in the lower part of the mantissa.
    __ Subu(scratch2, dst_mantissa,
            Operand(HeapNumber::kMantissaBitsInTopWord));
    __ Branch(&fewer_than_20_useful_bits, lt, scratch2, Operand(zero_reg));
    // Set the higher 20 bits of the mantissa.
    __ srlv(at, int_scratch, scratch2);
    __ or_(dst_exponent, dst_exponent, at);
    __ li(at, 32);
    __ subu(scratch2, at, scratch2);
    __ sllv(dst_mantissa, int_scratch, scratch2);
    __ Branch(&done);

    __ bind(&fewer_than_20_useful_bits);
    __ li(at, HeapNumber::kMantissaBitsInTopWord);
    __ subu(scratch2, at, dst_mantissa);
    __ sllv(scratch2, int_scratch, scratch2);
    __ Or(dst_exponent, dst_exponent, scratch2);
    // Set dst_mantissa to 0.
    __ mov(dst_mantissa, zero_reg);
  }
  __ bind(&done);
}


void FloatingPointHelper::LoadNumberAsInt32Double(MacroAssembler* masm,
                                                  Register object,
                                                  Destination destination,
                                                  DoubleRegister double_dst,
                                                  DoubleRegister double_scratch,
                                                  Register dst_mantissa,
                                                  Register dst_exponent,
                                                  Register heap_number_map,
                                                  Register scratch1,
                                                  Register scratch2,
                                                  FPURegister single_scratch,
                                                  Label* not_int32) {
  ASSERT(!scratch1.is(object) && !scratch2.is(object));
  ASSERT(!scratch1.is(scratch2));
  ASSERT(!heap_number_map.is(object) &&
         !heap_number_map.is(scratch1) &&
         !heap_number_map.is(scratch2));

  Label done, obj_is_not_smi;

  __ JumpIfNotSmi(object, &obj_is_not_smi);
  __ SmiUntag(scratch1, object);
  ConvertIntToDouble(masm, scratch1, destination, double_dst, dst_mantissa,
                     dst_exponent, scratch2, single_scratch);
  __ Branch(&done);

  __ bind(&obj_is_not_smi);
  __ AssertRootValue(heap_number_map,
                     Heap::kHeapNumberMapRootIndex,
                     "HeapNumberMap register clobbered.");
  __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_int32);

  // Load the number.
  if (CpuFeatures::IsSupported(FPU)) {
    CpuFeatures::Scope scope(FPU);
    // Load the double value.
    __ ldc1(double_dst, FieldMemOperand(object, HeapNumber::kValueOffset));

    Register except_flag = scratch2;
    __ EmitFPUTruncate(kRoundToZero,
                       scratch1,
                       double_dst,
                       at,
                       double_scratch,
                       except_flag,
                       kCheckForInexactConversion);

    // Jump to not_int32 if the operation did not succeed.
    __ Branch(not_int32, ne, except_flag, Operand(zero_reg));

    if (destination == kCoreRegisters) {
      __ Move(dst_mantissa, dst_exponent, double_dst);
    }

  } else {
    ASSERT(!scratch1.is(object) && !scratch2.is(object));
    // Load the double value in the destination registers.
    bool save_registers = object.is(dst_mantissa) || object.is(dst_exponent);
    if (save_registers) {
      // Save both output registers, because the other one probably holds
      // an important value too.
      __ Push(dst_exponent, dst_mantissa);
    }
    __ lw(dst_exponent, FieldMemOperand(object, HeapNumber::kExponentOffset));
    __ lw(dst_mantissa, FieldMemOperand(object, HeapNumber::kMantissaOffset));

    // Check for 0 and -0.
    Label zero;
    __ And(scratch1, dst_exponent, Operand(~HeapNumber::kSignMask));
    __ Or(scratch1, scratch1, Operand(dst_mantissa));
    __ Branch(&zero, eq, scratch1, Operand(zero_reg));

    // Check that the value can be exactly represented by a 32-bit integer.
    // Jump to not_int32 if that's not the case.
    Label restore_input_and_miss;
    DoubleIs32BitInteger(masm, dst_exponent, dst_mantissa, scratch1, scratch2,
                         &restore_input_and_miss);

    // dst_* were trashed. Reload the double value.
    if (save_registers) {
      __ Pop(dst_exponent, dst_mantissa);
    }
    __ lw(dst_exponent, FieldMemOperand(object, HeapNumber::kExponentOffset));
    __ lw(dst_mantissa, FieldMemOperand(object, HeapNumber::kMantissaOffset));
    __ Branch(&done);

    __ bind(&restore_input_and_miss);
    if (save_registers) {
      __ Pop(dst_exponent, dst_mantissa);
    }
    __ Branch(not_int32);

    __ bind(&zero);
    if (save_registers) {
      __ Drop(2);
    }
  }

  __ bind(&done);
}


void FloatingPointHelper::LoadNumberAsInt32(MacroAssembler* masm,
                                            Register object,
                                            Register dst,
                                            Register heap_number_map,
                                            Register scratch1,
                                            Register scratch2,
                                            Register scratch3,
                                            DoubleRegister double_scratch0,
                                            DoubleRegister double_scratch1,
                                            Label* not_int32) {
  ASSERT(!dst.is(object));
  ASSERT(!scratch1.is(object) && !scratch2.is(object) && !scratch3.is(object));
  ASSERT(!scratch1.is(scratch2) &&
         !scratch1.is(scratch3) &&
         !scratch2.is(scratch3));

  Label done, maybe_undefined;

  __ UntagAndJumpIfSmi(dst, object, &done);

  __ AssertRootValue(heap_number_map,
                     Heap::kHeapNumberMapRootIndex,
                     "HeapNumberMap register clobbered.");

  __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, &maybe_undefined);

  // Object is a heap number.
  // Convert the floating point value to a 32-bit integer.
  if (CpuFeatures::IsSupported(FPU)) {
    CpuFeatures::Scope scope(FPU);
    // Load the double value.
    __ ldc1(double_scratch0, FieldMemOperand(object, HeapNumber::kValueOffset));

    Register except_flag = scratch2;
    __ EmitFPUTruncate(kRoundToZero,
                       dst,
                       double_scratch0,
                       scratch1,
                       double_scratch1,
                       except_flag,
                       kCheckForInexactConversion);

    // Jump to not_int32 if the operation did not succeed.
    __ Branch(not_int32, ne, except_flag, Operand(zero_reg));
  } else {
    // Load the double value in the destination registers.
    __ lw(scratch2, FieldMemOperand(object, HeapNumber::kExponentOffset));
    __ lw(scratch1, FieldMemOperand(object, HeapNumber::kMantissaOffset));

    // Check for 0 and -0.
    __ And(dst, scratch1, Operand(~HeapNumber::kSignMask));
    __ Or(dst, scratch2, Operand(dst));
    __ Branch(&done, eq, dst, Operand(zero_reg));

    DoubleIs32BitInteger(masm, scratch1, scratch2, dst, scratch3, not_int32);

    // Registers state after DoubleIs32BitInteger.
    // dst: mantissa[51:20].
    // scratch2: 1

    // Shift back the higher bits of the mantissa.
    __ srlv(dst, dst, scratch3);
    // Set the implicit first bit.
    __ li(at, 32);
    __ subu(scratch3, at, scratch3);
    __ sllv(scratch2, scratch2, scratch3);
    __ Or(dst, dst, scratch2);
    // Set the sign.
    __ lw(scratch1, FieldMemOperand(object, HeapNumber::kExponentOffset));
    __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask));
    Label skip_sub;
    __ Branch(&skip_sub, ge, scratch1, Operand(zero_reg));
    __ Subu(dst, zero_reg, dst);
    __ bind(&skip_sub);
  }
  __ Branch(&done);

  __ bind(&maybe_undefined);
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(not_int32, ne, object, Operand(at));
  // |undefined| is truncated to 0.
  __ li(dst, Operand(Smi::FromInt(0)));
  // Fall through.

  __ bind(&done);
}


void FloatingPointHelper::DoubleIs32BitInteger(MacroAssembler* masm,
                                               Register src_exponent,
                                               Register src_mantissa,
                                               Register dst,
                                               Register scratch,
                                               Label* not_int32) {
  // Get exponent alone in scratch.
  __ Ext(scratch,
         src_exponent,
         HeapNumber::kExponentShift,
         HeapNumber::kExponentBits);

  // Subtract the bias from the exponent.
  __ Subu(scratch, scratch, Operand(HeapNumber::kExponentBias));

  // src_exponent: higher (exponent) part of the double value.
  // src_mantissa: lower (mantissa) part of the double value.
  // scratch: unbiased exponent.

  // Fast cases. Check for obvious non 32-bit integer values.
  // Negative exponent cannot yield 32-bit integers.
  __ Branch(not_int32, lt, scratch, Operand(zero_reg));
  // Exponent greater than 31 cannot yield 32-bit integers.
  // Also, a positive value with an exponent equal to 31 is outside of the
  // signed 32-bit integer range.
  // Another way to put it is that if (exponent - signbit) > 30 then the
  // number cannot be represented as an int32.
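  // For example, 2^31 has exponent 31 and sign bit 0, so 31 - 0 > 30 and it
  // is rejected, while -2^31 (kMinInt) has sign bit 1, so 31 - 1 == 30 and
  // it is let through.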
  Register tmp = dst;
  __ srl(at, src_exponent, 31);
  __ subu(tmp, scratch, at);
  __ Branch(not_int32, gt, tmp, Operand(30));
  // - Bits [21:0] in the mantissa are not null.
  __ And(tmp, src_mantissa, 0x3fffff);
  __ Branch(not_int32, ne, tmp, Operand(zero_reg));

  // Otherwise the exponent needs to be big enough to shift left all the
  // non zero bits left. So we need the (30 - exponent) last bits of the
  // 31 higher bits of the mantissa to be null.
  // Because bits [21:0] are null, we can check instead that the
  // (32 - exponent) last bits of the 32 higher bits of the mantissa are null.

  // Get the 32 higher bits of the mantissa in dst.
  __ Ext(dst,
         src_mantissa,
         HeapNumber::kMantissaBitsInTopWord,
         32 - HeapNumber::kMantissaBitsInTopWord);
  __ sll(at, src_exponent, HeapNumber::kNonMantissaBitsInTopWord);
  __ or_(dst, dst, at);

  // Create the mask and test the lower bits (of the higher bits).
  __ li(at, 32);
  __ subu(scratch, at, scratch);
  __ li(src_mantissa, 1);
  __ sllv(src_exponent, src_mantissa, scratch);
  __ Subu(src_exponent, src_exponent, Operand(1));
  __ And(src_exponent, dst, src_exponent);
  __ Branch(not_int32, ne, src_exponent, Operand(zero_reg));
}


void FloatingPointHelper::CallCCodeForDoubleOperation(
    MacroAssembler* masm,
    Token::Value op,
    Register heap_number_result,
    Register scratch) {
  // Using core registers:
  // a0: Left value (least significant part of mantissa).
  // a1: Left value (sign, exponent, top of mantissa).
  // a2: Right value (least significant part of mantissa).
  // a3: Right value (sign, exponent, top of mantissa).

  // Assert that heap_number_result is saved.
  // We currently always use s0 to pass it.
  ASSERT(heap_number_result.is(s0));

  // Push the current return address before the C call.
  __ push(ra);
  __ PrepareCallCFunction(4, scratch);  // Two doubles are 4 arguments.
  if (!IsMipsSoftFloatABI) {
    CpuFeatures::Scope scope(FPU);
    // We are not using MIPS FPU instructions, and parameters for the runtime
    // function call are prepared in a0-a3 registers, but the function we are
    // calling is compiled with the hard-float flag and expects the hard-float
    // ABI (parameters in f12/f14 registers). We need to copy parameters from
    // a0-a3 registers to f12/f14 register pairs.
    __ Move(f12, a0, a1);
    __ Move(f14, a2, a3);
  }
  {
    AllowExternalCallThatCantCauseGC scope(masm);
    __ CallCFunction(
        ExternalReference::double_fp_operation(op, masm->isolate()), 0, 2);
  }
  // Store answer in the overwritable heap number.
  if (!IsMipsSoftFloatABI) {
    CpuFeatures::Scope scope(FPU);
    // Double returned in register f0.
    __ sdc1(f0, FieldMemOperand(heap_number_result, HeapNumber::kValueOffset));
  } else {
    // Double returned in registers v0 and v1.
    __ sw(v1, FieldMemOperand(heap_number_result, HeapNumber::kExponentOffset));
    __ sw(v0, FieldMemOperand(heap_number_result, HeapNumber::kMantissaOffset));
  }
  // Place heap_number_result in v0 and return to the pushed return address.
  __ pop(ra);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, heap_number_result);
}


bool WriteInt32ToHeapNumberStub::IsPregenerated() {
  // These variants are compiled ahead of time. See next method.
  if (the_int_.is(a1) &&
      the_heap_number_.is(v0) &&
      scratch_.is(a2) &&
      sign_.is(a3)) {
    return true;
  }
  if (the_int_.is(a2) &&
      the_heap_number_.is(v0) &&
      scratch_.is(a3) &&
      sign_.is(a0)) {
    return true;
  }
  // Other register combinations are generated as and when they are needed,
  // so it is unsafe to call them from stubs (we can't generate a stub while
  // we are generating a stub).
  return false;
}


void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime() {
  WriteInt32ToHeapNumberStub stub1(a1, v0, a2, a3);
  WriteInt32ToHeapNumberStub stub2(a2, v0, a3, a0);
  stub1.GetCode()->set_is_pregenerated(true);
  stub2.GetCode()->set_is_pregenerated(true);
}


lrn@chromium.org7516f052011-03-30 08:52:27 +00001179// See comment for class, this does NOT work for int32's that are in Smi range.
1180void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001181 Label max_negative_int;
1182 // the_int_ has the answer which is a signed int32 but not a Smi.
1183 // We test for the special value that has a different exponent.
1184 STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u);
1185 // Test sign, and save for later conditionals.
1186 __ And(sign_, the_int_, Operand(0x80000000u));
1187 __ Branch(&max_negative_int, eq, the_int_, Operand(0x80000000u));
1188
 1189 // Set up the correct exponent in scratch_. All non-Smi int32s have the same
 1190 // exponent: a non-Smi integer is 1.xxx * 2^30, so the exponent is 30 (biased).
1191 uint32_t non_smi_exponent =
1192 (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift;
1193 __ li(scratch_, Operand(non_smi_exponent));
1194 // Set the sign bit in scratch_ if the value was negative.
1195 __ or_(scratch_, scratch_, sign_);
1196 // Subtract from 0 if the value was negative.
1197 __ subu(at, zero_reg, the_int_);
mstarzinger@chromium.org3233d2f2012-03-14 11:16:03 +00001198 __ Movn(the_int_, at, sign_);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001199 // We should be masking the implict first digit of the mantissa away here,
1200 // but it just ends up combining harmlessly with the last digit of the
1201 // exponent that happens to be 1. The sign bit is 0 so we shift 10 to get
1202 // the most significant 1 to hit the last bit of the 12 bit sign and exponent.
1203 ASSERT(((1 << HeapNumber::kExponentShift) & non_smi_exponent) != 0);
1204 const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2;
1205 __ srl(at, the_int_, shift_distance);
1206 __ or_(scratch_, scratch_, at);
1207 __ sw(scratch_, FieldMemOperand(the_heap_number_,
1208 HeapNumber::kExponentOffset));
1209 __ sll(scratch_, the_int_, 32 - shift_distance);
1210 __ sw(scratch_, FieldMemOperand(the_heap_number_,
1211 HeapNumber::kMantissaOffset));
1212 __ Ret();
1213
1214 __ bind(&max_negative_int);
1215 // The max negative int32 is stored as a positive number in the mantissa of
1216 // a double because it uses a sign bit instead of using two's complement.
1217 // The actual mantissa bits stored are all 0 because the implicit most
1218 // significant 1 bit is not stored.
1219 non_smi_exponent += 1 << HeapNumber::kExponentShift;
1220 __ li(scratch_, Operand(HeapNumber::kSignMask | non_smi_exponent));
1221 __ sw(scratch_,
1222 FieldMemOperand(the_heap_number_, HeapNumber::kExponentOffset));
1223 __ mov(scratch_, zero_reg);
1224 __ sw(scratch_,
1225 FieldMemOperand(the_heap_number_, HeapNumber::kMantissaOffset));
1226 __ Ret();
1227}
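// Illustrative sketch (not part of the stub above): the bit manipulation that
// WriteInt32ToHeapNumberStub::Generate performs, done on the host. A non-Smi
// int32 has magnitude in [2^30, 2^31), so its double representation always
// uses the biased exponent 1023 + 30; the integer bits below the leading 1
// become the top of the mantissa (high word gets magnitude >> 10, low word
// gets magnitude << 22). The INT32_MIN case is handled separately by the stub
// and is excluded here. Standalone sketch kept out of the build with #if 0.
#if 0
#include <cassert>
#include <cstdint>
#include <cstring>

int main() {
  int32_t value = -1234567890;  // Any int32 outside the Smi range, not INT32_MIN.
  uint32_t sign = value < 0 ? 0x80000000u : 0u;
  uint32_t magnitude =
      value < 0 ? 0u - static_cast<uint32_t>(value) : static_cast<uint32_t>(value);
  uint32_t exponent = (1023u + 30u) << 20;             // Biased exponent in the high word.
  uint32_t hi = sign | exponent | (magnitude >> 10);   // Implicit 1 merges with exponent LSB.
  uint32_t lo = magnitude << 22;                       // Remaining low mantissa bits.
  uint64_t bits = (static_cast<uint64_t>(hi) << 32) | lo;
  double d;
  std::memcpy(&d, &bits, sizeof d);
  assert(d == static_cast<double>(value));  // Same value an FPU conversion would give.
  return 0;
}
#endif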
1228
1229
1230// Handle the case where the lhs and rhs are the same object.
1231// Equality is almost reflexive (everything but NaN), so this is a test
1232// for "identity and not NaN".
1233static void EmitIdenticalObjectComparison(MacroAssembler* masm,
1234 Label* slow,
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001235 Condition cc) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001236 Label not_identical;
1237 Label heap_number, return_equal;
1238 Register exp_mask_reg = t5;
1239
1240 __ Branch(&not_identical, ne, a0, Operand(a1));
1241
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001242 __ li(exp_mask_reg, Operand(HeapNumber::kExponentMask));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001243
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001244 // Test for NaN. Sadly, we can't just compare to factory->nan_value(),
 1245 // so we do the second best thing - test it ourselves.
 1246 // The operands are identical, and we already know they are not both Smis,
 1247 // so neither of them is a Smi. If it's not a heap number, then return equal.
1248 if (cc == less || cc == greater) {
1249 __ GetObjectType(a0, t4, t4);
1250 __ Branch(slow, greater, t4, Operand(FIRST_SPEC_OBJECT_TYPE));
1251 } else {
1252 __ GetObjectType(a0, t4, t4);
1253 __ Branch(&heap_number, eq, t4, Operand(HEAP_NUMBER_TYPE));
1254 // Comparing JS objects with <=, >= is complicated.
1255 if (cc != eq) {
1256 __ Branch(slow, greater, t4, Operand(FIRST_SPEC_OBJECT_TYPE));
1257 // Normally here we fall through to return_equal, but undefined is
1258 // special: (undefined == undefined) == true, but
1259 // (undefined <= undefined) == false! See ECMAScript 11.8.5.
1260 if (cc == less_equal || cc == greater_equal) {
1261 __ Branch(&return_equal, ne, t4, Operand(ODDBALL_TYPE));
1262 __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
1263 __ Branch(&return_equal, ne, a0, Operand(t2));
1264 if (cc == le) {
1265 // undefined <= undefined should fail.
1266 __ li(v0, Operand(GREATER));
1267 } else {
1268 // undefined >= undefined should fail.
1269 __ li(v0, Operand(LESS));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001270 }
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001271 __ Ret();
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001272 }
1273 }
1274 }
1275
1276 __ bind(&return_equal);
ulan@chromium.org6ff65142012-03-21 09:52:17 +00001277
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001278 if (cc == less) {
1279 __ li(v0, Operand(GREATER)); // Things aren't less than themselves.
1280 } else if (cc == greater) {
1281 __ li(v0, Operand(LESS)); // Things aren't greater than themselves.
1282 } else {
1283 __ mov(v0, zero_reg); // Things are <=, >=, ==, === themselves.
1284 }
1285 __ Ret();
1286
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001287 // For less and greater we don't have to check for NaN since the result of
1288 // x < x is false regardless. For the others here is some code to check
1289 // for NaN.
1290 if (cc != lt && cc != gt) {
1291 __ bind(&heap_number);
1292 // It is a heap number, so return non-equal if it's NaN and equal if it's
1293 // not NaN.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001294
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001295 // The representation of NaN values has all exponent bits (52..62) set,
1296 // and not all mantissa bits (0..51) clear.
1297 // Read top bits of double representation (second word of value).
1298 __ lw(t2, FieldMemOperand(a0, HeapNumber::kExponentOffset));
1299 // Test that exponent bits are all set.
1300 __ And(t3, t2, Operand(exp_mask_reg));
 1301 // If not all exponent bits are set (ne condition), it is not a NaN, so the objects are equal.
1302 __ Branch(&return_equal, ne, t3, Operand(exp_mask_reg));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001303
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001304 // Shift out the sign bit and all exponent bits, retaining only the mantissa.
1305 __ sll(t2, t2, HeapNumber::kNonMantissaBitsInTopWord);
1306 // Or with all low-bits of mantissa.
1307 __ lw(t3, FieldMemOperand(a0, HeapNumber::kMantissaOffset));
1308 __ Or(v0, t3, Operand(t2));
1309 // For equal we already have the right value in v0: Return zero (equal)
1310 // if all bits in mantissa are zero (it's an Infinity) and non-zero if
1311 // not (it's a NaN). For <= and >= we need to load v0 with the failing
1312 // value if it's a NaN.
1313 if (cc != eq) {
1314 // All-zero means Infinity means equal.
1315 __ Ret(eq, v0, Operand(zero_reg));
1316 if (cc == le) {
1317 __ li(v0, Operand(GREATER)); // NaN <= NaN should fail.
1318 } else {
1319 __ li(v0, Operand(LESS)); // NaN >= NaN should fail.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001320 }
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001321 }
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001322 __ Ret();
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001323 }
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001324 // No fall through here.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001325
1326 __ bind(&not_identical);
1327}
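// Illustrative sketch (not part of the function above): the NaN test performed
// in the heap_number path of EmitIdenticalObjectComparison, written on the
// host. A double is a NaN exactly when all eleven exponent bits are set and
// the 52-bit mantissa is non-zero (an all-zero mantissa would be an Infinity).
// Standalone sketch kept out of the build with #if 0.
#if 0
#include <cassert>
#include <cstdint>
#include <cstring>
#include <limits>

static bool IsNaNBits(double d) {
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof bits);
  uint64_t exponent = (bits >> 52) & 0x7FF;               // Bits 52..62.
  uint64_t mantissa = bits & ((uint64_t{1} << 52) - 1);   // Bits 0..51.
  return exponent == 0x7FF && mantissa != 0;
}

int main() {
  assert(IsNaNBits(std::numeric_limits<double>::quiet_NaN()));
  assert(!IsNaNBits(std::numeric_limits<double>::infinity()));
  assert(!IsNaNBits(1.0));
  return 0;
}
#endif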
1328
1329
1330static void EmitSmiNonsmiComparison(MacroAssembler* masm,
1331 Register lhs,
1332 Register rhs,
1333 Label* both_loaded_as_doubles,
1334 Label* slow,
1335 bool strict) {
1336 ASSERT((lhs.is(a0) && rhs.is(a1)) ||
1337 (lhs.is(a1) && rhs.is(a0)));
1338
1339 Label lhs_is_smi;
jkummerow@chromium.orgc3b37122011-11-07 10:14:12 +00001340 __ JumpIfSmi(lhs, &lhs_is_smi);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001341 // Rhs is a Smi.
1342 // Check whether the non-smi is a heap number.
1343 __ GetObjectType(lhs, t4, t4);
1344 if (strict) {
1345 // If lhs was not a number and rhs was a Smi then strict equality cannot
1346 // succeed. Return non-equal (lhs is already not zero).
ulan@chromium.org6ff65142012-03-21 09:52:17 +00001347 __ Ret(USE_DELAY_SLOT, ne, t4, Operand(HEAP_NUMBER_TYPE));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001348 __ mov(v0, lhs);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001349 } else {
1350 // Smi compared non-strictly with a non-Smi non-heap-number. Call
1351 // the runtime.
1352 __ Branch(slow, ne, t4, Operand(HEAP_NUMBER_TYPE));
1353 }
1354
1355 // Rhs is a smi, lhs is a number.
1356 // Convert smi rhs to double.
1357 if (CpuFeatures::IsSupported(FPU)) {
1358 CpuFeatures::Scope scope(FPU);
1359 __ sra(at, rhs, kSmiTagSize);
1360 __ mtc1(at, f14);
1361 __ cvt_d_w(f14, f14);
1362 __ ldc1(f12, FieldMemOperand(lhs, HeapNumber::kValueOffset));
1363 } else {
1364 // Load lhs to a double in a2, a3.
1365 __ lw(a3, FieldMemOperand(lhs, HeapNumber::kValueOffset + 4));
1366 __ lw(a2, FieldMemOperand(lhs, HeapNumber::kValueOffset));
1367
1368 // Write Smi from rhs to a1 and a0 in double format. t5 is scratch.
1369 __ mov(t6, rhs);
1370 ConvertToDoubleStub stub1(a1, a0, t6, t5);
1371 __ push(ra);
sgjesse@chromium.org6db88712011-07-11 11:41:22 +00001372 __ Call(stub1.GetCode());
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001373
1374 __ pop(ra);
1375 }
1376
1377 // We now have both loaded as doubles.
1378 __ jmp(both_loaded_as_doubles);
1379
1380 __ bind(&lhs_is_smi);
1381 // Lhs is a Smi. Check whether the non-smi is a heap number.
1382 __ GetObjectType(rhs, t4, t4);
1383 if (strict) {
 1384 // If rhs was not a number and lhs was a Smi then strict equality cannot
 1385 // succeed. Return non-equal.
ulan@chromium.org6ff65142012-03-21 09:52:17 +00001386 __ Ret(USE_DELAY_SLOT, ne, t4, Operand(HEAP_NUMBER_TYPE));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001387 __ li(v0, Operand(1));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001388 } else {
1389 // Smi compared non-strictly with a non-Smi non-heap-number. Call
1390 // the runtime.
1391 __ Branch(slow, ne, t4, Operand(HEAP_NUMBER_TYPE));
1392 }
1393
1394 // Lhs is a smi, rhs is a number.
1395 // Convert smi lhs to double.
1396 if (CpuFeatures::IsSupported(FPU)) {
1397 CpuFeatures::Scope scope(FPU);
1398 __ sra(at, lhs, kSmiTagSize);
1399 __ mtc1(at, f12);
1400 __ cvt_d_w(f12, f12);
1401 __ ldc1(f14, FieldMemOperand(rhs, HeapNumber::kValueOffset));
1402 } else {
1403 // Convert lhs to a double format. t5 is scratch.
1404 __ mov(t6, lhs);
1405 ConvertToDoubleStub stub2(a3, a2, t6, t5);
1406 __ push(ra);
sgjesse@chromium.org6db88712011-07-11 11:41:22 +00001407 __ Call(stub2.GetCode());
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001408 __ pop(ra);
1409 // Load rhs to a double in a1, a0.
1410 if (rhs.is(a0)) {
1411 __ lw(a1, FieldMemOperand(rhs, HeapNumber::kValueOffset + 4));
1412 __ lw(a0, FieldMemOperand(rhs, HeapNumber::kValueOffset));
1413 } else {
1414 __ lw(a0, FieldMemOperand(rhs, HeapNumber::kValueOffset));
1415 __ lw(a1, FieldMemOperand(rhs, HeapNumber::kValueOffset + 4));
1416 }
1417 }
1418 // Fall through to both_loaded_as_doubles.
lrn@chromium.org7516f052011-03-30 08:52:27 +00001419}
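// Illustrative sketch (not part of the function above): what the FPU path of
// EmitSmiNonsmiComparison does with the smi operand. A smi stores a 31-bit
// integer shifted left by one (the tag lives in the low bit), so an arithmetic
// shift right by one recovers the integer, which is then widened to a double
// (the sra / mtc1 / cvt_d_w sequence). Standalone sketch, kept out of the
// build with #if 0; kSmiTagSize is assumed to match the V8 constant.
#if 0
#include <cassert>
#include <cstdint>

int main() {
  const int kSmiTagSize = 1;
  int32_t value = -12345;
  int32_t smi =
      static_cast<int32_t>(static_cast<uint32_t>(value) << kSmiTagSize);  // Tagged smi.
  int32_t untagged = smi >> kSmiTagSize;          // Arithmetic shift keeps the sign.
  double as_double = static_cast<double>(untagged);  // cvt.d.w equivalent.
  assert(untagged == value);
  assert(as_double == -12345.0);
  return 0;
}
#endif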
1420
1421
1422void EmitNanCheck(MacroAssembler* masm, Condition cc) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001423 bool exp_first = (HeapNumber::kExponentOffset == HeapNumber::kValueOffset);
1424 if (CpuFeatures::IsSupported(FPU)) {
1425 CpuFeatures::Scope scope(FPU);
1426 // Lhs and rhs are already loaded to f12 and f14 register pairs.
danno@chromium.org40cb8782011-05-25 07:58:50 +00001427 __ Move(t0, t1, f14);
1428 __ Move(t2, t3, f12);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001429 } else {
1430 // Lhs and rhs are already loaded to GP registers.
1431 __ mov(t0, a0); // a0 has LS 32 bits of rhs.
1432 __ mov(t1, a1); // a1 has MS 32 bits of rhs.
1433 __ mov(t2, a2); // a2 has LS 32 bits of lhs.
1434 __ mov(t3, a3); // a3 has MS 32 bits of lhs.
1435 }
1436 Register rhs_exponent = exp_first ? t0 : t1;
1437 Register lhs_exponent = exp_first ? t2 : t3;
1438 Register rhs_mantissa = exp_first ? t1 : t0;
1439 Register lhs_mantissa = exp_first ? t3 : t2;
1440 Label one_is_nan, neither_is_nan;
1441 Label lhs_not_nan_exp_mask_is_loaded;
1442
1443 Register exp_mask_reg = t4;
1444 __ li(exp_mask_reg, HeapNumber::kExponentMask);
1445 __ and_(t5, lhs_exponent, exp_mask_reg);
1446 __ Branch(&lhs_not_nan_exp_mask_is_loaded, ne, t5, Operand(exp_mask_reg));
1447
1448 __ sll(t5, lhs_exponent, HeapNumber::kNonMantissaBitsInTopWord);
1449 __ Branch(&one_is_nan, ne, t5, Operand(zero_reg));
1450
1451 __ Branch(&one_is_nan, ne, lhs_mantissa, Operand(zero_reg));
1452
1453 __ li(exp_mask_reg, HeapNumber::kExponentMask);
1454 __ bind(&lhs_not_nan_exp_mask_is_loaded);
1455 __ and_(t5, rhs_exponent, exp_mask_reg);
1456
1457 __ Branch(&neither_is_nan, ne, t5, Operand(exp_mask_reg));
1458
1459 __ sll(t5, rhs_exponent, HeapNumber::kNonMantissaBitsInTopWord);
1460 __ Branch(&one_is_nan, ne, t5, Operand(zero_reg));
1461
1462 __ Branch(&neither_is_nan, eq, rhs_mantissa, Operand(zero_reg));
1463
1464 __ bind(&one_is_nan);
1465 // NaN comparisons always fail.
1466 // Load whatever we need in v0 to make the comparison fail.
ulan@chromium.org6ff65142012-03-21 09:52:17 +00001467
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001468 if (cc == lt || cc == le) {
1469 __ li(v0, Operand(GREATER));
1470 } else {
1471 __ li(v0, Operand(LESS));
1472 }
ulan@chromium.org6ff65142012-03-21 09:52:17 +00001473 __ Ret();
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001474
1475 __ bind(&neither_is_nan);
1476}
1477
1478
1479static void EmitTwoNonNanDoubleComparison(MacroAssembler* masm, Condition cc) {
1480 // f12 and f14 have the two doubles. Neither is a NaN.
1481 // Call a native function to do a comparison between two non-NaNs.
 1482 // Call a C routine that may not cause GC or other trouble.
 1483 // We save ra and return manually because the argument slots need to
 1484 // be freed.
1485
1486 Label return_result_not_equal, return_result_equal;
1487 if (cc == eq) {
1488 // Doubles are not equal unless they have the same bit pattern.
1489 // Exception: 0 and -0.
1490 bool exp_first = (HeapNumber::kExponentOffset == HeapNumber::kValueOffset);
1491 if (CpuFeatures::IsSupported(FPU)) {
danno@chromium.org40cb8782011-05-25 07:58:50 +00001492 CpuFeatures::Scope scope(FPU);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001493 // Lhs and rhs are already loaded to f12 and f14 register pairs.
danno@chromium.org40cb8782011-05-25 07:58:50 +00001494 __ Move(t0, t1, f14);
1495 __ Move(t2, t3, f12);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001496 } else {
1497 // Lhs and rhs are already loaded to GP registers.
1498 __ mov(t0, a0); // a0 has LS 32 bits of rhs.
1499 __ mov(t1, a1); // a1 has MS 32 bits of rhs.
1500 __ mov(t2, a2); // a2 has LS 32 bits of lhs.
1501 __ mov(t3, a3); // a3 has MS 32 bits of lhs.
1502 }
1503 Register rhs_exponent = exp_first ? t0 : t1;
1504 Register lhs_exponent = exp_first ? t2 : t3;
1505 Register rhs_mantissa = exp_first ? t1 : t0;
1506 Register lhs_mantissa = exp_first ? t3 : t2;
1507
1508 __ xor_(v0, rhs_mantissa, lhs_mantissa);
1509 __ Branch(&return_result_not_equal, ne, v0, Operand(zero_reg));
1510
1511 __ subu(v0, rhs_exponent, lhs_exponent);
1512 __ Branch(&return_result_equal, eq, v0, Operand(zero_reg));
1513 // 0, -0 case.
1514 __ sll(rhs_exponent, rhs_exponent, kSmiTagSize);
1515 __ sll(lhs_exponent, lhs_exponent, kSmiTagSize);
1516 __ or_(t4, rhs_exponent, lhs_exponent);
1517 __ or_(t4, t4, rhs_mantissa);
1518
1519 __ Branch(&return_result_not_equal, ne, t4, Operand(zero_reg));
1520
1521 __ bind(&return_result_equal);
ulan@chromium.org6ff65142012-03-21 09:52:17 +00001522
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001523 __ li(v0, Operand(EQUAL));
1524 __ Ret();
1525 }
1526
1527 __ bind(&return_result_not_equal);
1528
1529 if (!CpuFeatures::IsSupported(FPU)) {
1530 __ push(ra);
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00001531 __ PrepareCallCFunction(0, 2, t4);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001532 if (!IsMipsSoftFloatABI) {
 1533 // The parameters for the runtime call are prepared in the a0-a3 core
 1534 // registers (this code does not use MIPS FPU instructions to set them up),
 1535 // but the C function we are calling is compiled with the hard-float flag
 1536 // and expects the hard-float ABI (parameters in f12/f14). Copy the
 1537 // parameters from a0-a3 to the f12/f14 register pairs.
danno@chromium.org40cb8782011-05-25 07:58:50 +00001538 __ Move(f12, a0, a1);
1539 __ Move(f14, a2, a3);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001540 }
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00001541
1542 AllowExternalCallThatCantCauseGC scope(masm);
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00001543 __ CallCFunction(ExternalReference::compare_doubles(masm->isolate()),
1544 0, 2);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001545 __ pop(ra); // Because this function returns int, result is in v0.
1546 __ Ret();
1547 } else {
1548 CpuFeatures::Scope scope(FPU);
1549 Label equal, less_than;
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00001550 __ BranchF(&equal, NULL, eq, f12, f14);
1551 __ BranchF(&less_than, NULL, lt, f12, f14);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001552
1553 // Not equal, not less, not NaN, must be greater.
ulan@chromium.org6ff65142012-03-21 09:52:17 +00001554
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001555 __ li(v0, Operand(GREATER));
1556 __ Ret();
1557
1558 __ bind(&equal);
1559 __ li(v0, Operand(EQUAL));
1560 __ Ret();
1561
1562 __ bind(&less_than);
1563 __ li(v0, Operand(LESS));
1564 __ Ret();
1565 }
1566}
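// Illustrative sketch (not part of the function above): the +0 / -0 special
// case in EmitTwoNonNanDoubleComparison. The two zeros have different bit
// patterns (only the sign bit differs) yet must compare equal, so the non-FPU
// path cannot simply compare words: after shifting the sign bits out, both
// operands being all zero means the values are equal. Standalone sketch,
// kept out of the build with #if 0.
#if 0
#include <cassert>
#include <cstdint>
#include <cstring>

int main() {
  double plus_zero = 0.0;
  double minus_zero = -0.0;
  uint64_t a, b;
  std::memcpy(&a, &plus_zero, sizeof a);
  std::memcpy(&b, &minus_zero, sizeof b);
  assert(a != b);                    // Bit patterns differ (sign bit only).
  assert(plus_zero == minus_zero);   // But the values compare equal.
  // Test in the spirit of the stub: drop the sign bits and check that
  // everything else is zero in both operands.
  assert(((a << 1) | (b << 1)) == 0);
  return 0;
}
#endif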
1567
1568
1569static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
1570 Register lhs,
1571 Register rhs) {
erik.corry@gmail.comd6076d92011-06-06 09:39:18 +00001572 // If either operand is a JS object or an oddball value, then they are
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001573 // not equal since their pointers are different.
1574 // There is no test for undetectability in strict equality.
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00001575 STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001576 Label first_non_object;
1577 // Get the type of the first operand into a2 and compare it with
erik.corry@gmail.comd6076d92011-06-06 09:39:18 +00001578 // FIRST_SPEC_OBJECT_TYPE.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001579 __ GetObjectType(lhs, a2, a2);
erik.corry@gmail.comd6076d92011-06-06 09:39:18 +00001580 __ Branch(&first_non_object, less, a2, Operand(FIRST_SPEC_OBJECT_TYPE));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001581
1582 // Return non-zero.
1583 Label return_not_equal;
1584 __ bind(&return_not_equal);
ulan@chromium.org6ff65142012-03-21 09:52:17 +00001585 __ Ret(USE_DELAY_SLOT);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001586 __ li(v0, Operand(1));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001587
1588 __ bind(&first_non_object);
1589 // Check for oddballs: true, false, null, undefined.
1590 __ Branch(&return_not_equal, eq, a2, Operand(ODDBALL_TYPE));
1591
1592 __ GetObjectType(rhs, a3, a3);
erik.corry@gmail.comd6076d92011-06-06 09:39:18 +00001593 __ Branch(&return_not_equal, greater, a3, Operand(FIRST_SPEC_OBJECT_TYPE));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001594
1595 // Check for oddballs: true, false, null, undefined.
1596 __ Branch(&return_not_equal, eq, a3, Operand(ODDBALL_TYPE));
1597
1598 // Now that we have the types we might as well check for symbol-symbol.
1599 // Ensure that no non-strings have the symbol bit set.
1600 STATIC_ASSERT(LAST_TYPE < kNotStringTag + kIsSymbolMask);
1601 STATIC_ASSERT(kSymbolTag != 0);
1602 __ And(t2, a2, Operand(a3));
1603 __ And(t0, t2, Operand(kIsSymbolMask));
1604 __ Branch(&return_not_equal, ne, t0, Operand(zero_reg));
1605}
1606
1607
1608static void EmitCheckForTwoHeapNumbers(MacroAssembler* masm,
1609 Register lhs,
1610 Register rhs,
1611 Label* both_loaded_as_doubles,
1612 Label* not_heap_numbers,
1613 Label* slow) {
1614 __ GetObjectType(lhs, a3, a2);
1615 __ Branch(not_heap_numbers, ne, a2, Operand(HEAP_NUMBER_TYPE));
1616 __ lw(a2, FieldMemOperand(rhs, HeapObject::kMapOffset));
1617 // If first was a heap number & second wasn't, go to slow case.
1618 __ Branch(slow, ne, a3, Operand(a2));
1619
1620 // Both are heap numbers. Load them up then jump to the code we have
1621 // for that.
1622 if (CpuFeatures::IsSupported(FPU)) {
1623 CpuFeatures::Scope scope(FPU);
1624 __ ldc1(f12, FieldMemOperand(lhs, HeapNumber::kValueOffset));
1625 __ ldc1(f14, FieldMemOperand(rhs, HeapNumber::kValueOffset));
1626 } else {
1627 __ lw(a2, FieldMemOperand(lhs, HeapNumber::kValueOffset));
1628 __ lw(a3, FieldMemOperand(lhs, HeapNumber::kValueOffset + 4));
1629 if (rhs.is(a0)) {
1630 __ lw(a1, FieldMemOperand(rhs, HeapNumber::kValueOffset + 4));
1631 __ lw(a0, FieldMemOperand(rhs, HeapNumber::kValueOffset));
1632 } else {
1633 __ lw(a0, FieldMemOperand(rhs, HeapNumber::kValueOffset));
1634 __ lw(a1, FieldMemOperand(rhs, HeapNumber::kValueOffset + 4));
1635 }
1636 }
1637 __ jmp(both_loaded_as_doubles);
1638}
1639
1640
1641// Fast negative check for symbol-to-symbol equality.
1642static void EmitCheckForSymbolsOrObjects(MacroAssembler* masm,
1643 Register lhs,
1644 Register rhs,
1645 Label* possible_strings,
1646 Label* not_both_strings) {
1647 ASSERT((lhs.is(a0) && rhs.is(a1)) ||
1648 (lhs.is(a1) && rhs.is(a0)));
1649
1650 // a2 is object type of lhs.
1651 // Ensure that no non-strings have the symbol bit set.
1652 Label object_test;
1653 STATIC_ASSERT(kSymbolTag != 0);
1654 __ And(at, a2, Operand(kIsNotStringMask));
1655 __ Branch(&object_test, ne, at, Operand(zero_reg));
1656 __ And(at, a2, Operand(kIsSymbolMask));
1657 __ Branch(possible_strings, eq, at, Operand(zero_reg));
1658 __ GetObjectType(rhs, a3, a3);
1659 __ Branch(not_both_strings, ge, a3, Operand(FIRST_NONSTRING_TYPE));
1660 __ And(at, a3, Operand(kIsSymbolMask));
1661 __ Branch(possible_strings, eq, at, Operand(zero_reg));
1662
1663 // Both are symbols. We already checked they weren't the same pointer
1664 // so they are not equal.
ulan@chromium.org6ff65142012-03-21 09:52:17 +00001665 __ Ret(USE_DELAY_SLOT);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001666 __ li(v0, Operand(1)); // Non-zero indicates not equal.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001667
1668 __ bind(&object_test);
erik.corry@gmail.comd6076d92011-06-06 09:39:18 +00001669 __ Branch(not_both_strings, lt, a2, Operand(FIRST_SPEC_OBJECT_TYPE));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001670 __ GetObjectType(rhs, a2, a3);
erik.corry@gmail.comd6076d92011-06-06 09:39:18 +00001671 __ Branch(not_both_strings, lt, a3, Operand(FIRST_SPEC_OBJECT_TYPE));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001672
1673 // If both objects are undetectable, they are equal. Otherwise, they
1674 // are not equal, since they are different objects and an object is not
1675 // equal to undefined.
1676 __ lw(a3, FieldMemOperand(lhs, HeapObject::kMapOffset));
1677 __ lbu(a2, FieldMemOperand(a2, Map::kBitFieldOffset));
1678 __ lbu(a3, FieldMemOperand(a3, Map::kBitFieldOffset));
1679 __ and_(a0, a2, a3);
1680 __ And(a0, a0, Operand(1 << Map::kIsUndetectable));
ulan@chromium.org6ff65142012-03-21 09:52:17 +00001681 __ Ret(USE_DELAY_SLOT);
1682 __ xori(v0, a0, 1 << Map::kIsUndetectable);
lrn@chromium.org7516f052011-03-30 08:52:27 +00001683}
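// Illustrative sketch (not part of the function above): the bit trick at the
// end of EmitCheckForSymbolsOrObjects. ANDing the two maps' bit fields leaves
// the undetectable bit set only if *both* objects are undetectable; XORing
// with the mask then yields 0 ("equal") exactly in that case and a non-zero
// value otherwise. The mask value below is hypothetical; only the shape of
// the trick matters. Standalone sketch, kept out of the build with #if 0.
#if 0
#include <cassert>
#include <cstdint>

int main() {
  const uint8_t kIsUndetectableMask = 1 << 4;   // Hypothetical bit position.
  uint8_t bit_field_lhs = kIsUndetectableMask;  // lhs map: undetectable.
  uint8_t bit_field_rhs = kIsUndetectableMask;  // rhs map: undetectable.
  uint8_t both = bit_field_lhs & bit_field_rhs & kIsUndetectableMask;
  uint8_t result = both ^ kIsUndetectableMask;  // 0 => "equal", non-zero => "not equal".
  assert(result == 0);

  bit_field_rhs = 0;                            // Now only one is undetectable.
  both = bit_field_lhs & bit_field_rhs & kIsUndetectableMask;
  result = both ^ kIsUndetectableMask;
  assert(result != 0);
  return 0;
}
#endif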
1684
1685
1686void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
1687 Register object,
1688 Register result,
1689 Register scratch1,
1690 Register scratch2,
1691 Register scratch3,
1692 bool object_is_smi,
1693 Label* not_found) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001694 // Use of registers. Register result is used as a temporary.
1695 Register number_string_cache = result;
1696 Register mask = scratch3;
1697
1698 // Load the number string cache.
1699 __ LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);
1700
1701 // Make the hash mask from the length of the number string cache. It
1702 // contains two elements (number and string) for each cache entry.
1703 __ lw(mask, FieldMemOperand(number_string_cache, FixedArray::kLengthOffset));
1704 // Divide length by two (length is a smi).
1705 __ sra(mask, mask, kSmiTagSize + 1);
1706 __ Addu(mask, mask, -1); // Make mask.
1707
1708 // Calculate the entry in the number string cache. The hash value in the
1709 // number string cache for smis is just the smi value, and the hash for
1710 // doubles is the xor of the upper and lower words. See
1711 // Heap::GetNumberStringCache.
1712 Isolate* isolate = masm->isolate();
1713 Label is_smi;
1714 Label load_result_from_cache;
1715 if (!object_is_smi) {
1716 __ JumpIfSmi(object, &is_smi);
1717 if (CpuFeatures::IsSupported(FPU)) {
1718 CpuFeatures::Scope scope(FPU);
1719 __ CheckMap(object,
1720 scratch1,
1721 Heap::kHeapNumberMapRootIndex,
1722 not_found,
danno@chromium.org40cb8782011-05-25 07:58:50 +00001723 DONT_DO_SMI_CHECK);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001724
1725 STATIC_ASSERT(8 == kDoubleSize);
1726 __ Addu(scratch1,
1727 object,
1728 Operand(HeapNumber::kValueOffset - kHeapObjectTag));
1729 __ lw(scratch2, MemOperand(scratch1, kPointerSize));
1730 __ lw(scratch1, MemOperand(scratch1, 0));
1731 __ Xor(scratch1, scratch1, Operand(scratch2));
1732 __ And(scratch1, scratch1, Operand(mask));
1733
1734 // Calculate address of entry in string cache: each entry consists
1735 // of two pointer sized fields.
1736 __ sll(scratch1, scratch1, kPointerSizeLog2 + 1);
1737 __ Addu(scratch1, number_string_cache, scratch1);
1738
1739 Register probe = mask;
1740 __ lw(probe,
1741 FieldMemOperand(scratch1, FixedArray::kHeaderSize));
1742 __ JumpIfSmi(probe, not_found);
1743 __ ldc1(f12, FieldMemOperand(object, HeapNumber::kValueOffset));
1744 __ ldc1(f14, FieldMemOperand(probe, HeapNumber::kValueOffset));
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00001745 __ BranchF(&load_result_from_cache, NULL, eq, f12, f14);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001746 __ Branch(not_found);
1747 } else {
 1748 // Note that there is no cache check for the non-FPU case, even though
 1749 // it seems there could be. This may be a tiny optimization opportunity
 1750 // for non-FPU cores.
1751 __ Branch(not_found);
1752 }
1753 }
1754
1755 __ bind(&is_smi);
1756 Register scratch = scratch1;
1757 __ sra(scratch, object, 1); // Shift away the tag.
1758 __ And(scratch, mask, Operand(scratch));
1759
1760 // Calculate address of entry in string cache: each entry consists
1761 // of two pointer sized fields.
1762 __ sll(scratch, scratch, kPointerSizeLog2 + 1);
1763 __ Addu(scratch, number_string_cache, scratch);
1764
1765 // Check if the entry is the smi we are looking for.
1766 Register probe = mask;
1767 __ lw(probe, FieldMemOperand(scratch, FixedArray::kHeaderSize));
1768 __ Branch(not_found, ne, object, Operand(probe));
1769
1770 // Get the result from the cache.
1771 __ bind(&load_result_from_cache);
1772 __ lw(result,
1773 FieldMemOperand(scratch, FixedArray::kHeaderSize + kPointerSize));
1774
1775 __ IncrementCounter(isolate->counters()->number_to_string_native(),
1776 1,
1777 scratch1,
1778 scratch2);
lrn@chromium.org7516f052011-03-30 08:52:27 +00001779}
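// Illustrative sketch (not part of the function above): the cache-index
// computation in GenerateLookupNumberStringCache. For smis the hash is the smi
// value itself; for heap numbers it is the XOR of the two 32-bit halves of the
// double. The hash is then masked by (number of cache entries - 1), each entry
// holding a number/string pair. Standalone sketch kept out of the build with
// #if 0; the cache length is a made-up example value.
#if 0
#include <cstdint>
#include <cstdio>
#include <cstring>

int main() {
  const uint32_t kCacheElements = 128;           // FixedArray length (example).
  const uint32_t kEntries = kCacheElements / 2;  // Two slots per entry.
  const uint32_t mask = kEntries - 1;

  int32_t smi_value = 42;
  uint32_t smi_index = static_cast<uint32_t>(smi_value) & mask;

  double number = 3.25;
  uint64_t bits;
  std::memcpy(&bits, &number, sizeof bits);
  uint32_t hash = static_cast<uint32_t>(bits) ^ static_cast<uint32_t>(bits >> 32);
  uint32_t double_index = hash & mask;

  std::printf("smi entry: %u, double entry: %u\n", smi_index, double_index);
  return 0;
}
#endif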
1780
1781
1782void NumberToStringStub::Generate(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001783 Label runtime;
1784
1785 __ lw(a1, MemOperand(sp, 0));
1786
1787 // Generate code to lookup number in the number string cache.
1788 GenerateLookupNumberStringCache(masm, a1, v0, a2, a3, t0, false, &runtime);
ulan@chromium.org6ff65142012-03-21 09:52:17 +00001789 __ DropAndRet(1);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001790
1791 __ bind(&runtime);
1792 // Handle number to string in the runtime system if not found in the cache.
1793 __ TailCallRuntime(Runtime::kNumberToString, 1, 1);
lrn@chromium.org7516f052011-03-30 08:52:27 +00001794}
1795
1796
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001797static void ICCompareStub_CheckInputType(MacroAssembler* masm,
1798 Register input,
1799 Register scratch,
1800 CompareIC::State expected,
1801 Label* fail) {
1802 Label ok;
1803 if (expected == CompareIC::SMI) {
1804 __ JumpIfNotSmi(input, fail);
1805 } else if (expected == CompareIC::HEAP_NUMBER) {
1806 __ JumpIfSmi(input, &ok);
1807 __ CheckMap(input, scratch, Heap::kHeapNumberMapRootIndex, fail,
1808 DONT_DO_SMI_CHECK);
1809 }
1810 // We could be strict about symbol/string here, but as long as
1811 // hydrogen doesn't care, the stub doesn't have to care either.
1812 __ bind(&ok);
1813}
1814
1815
 1816 // On entry a1 (lhs) and a0 (rhs) are the values to be compared.
 1817 // On exit v0 is 0, positive or negative to indicate the result of
 1818 // the comparison.
1819void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
1820 Register lhs = a1;
1821 Register rhs = a0;
1822 Condition cc = GetCondition();
1823
1824 Label miss;
1825 ICCompareStub_CheckInputType(masm, lhs, a2, left_, &miss);
1826 ICCompareStub_CheckInputType(masm, rhs, a3, right_, &miss);
1827
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001828 Label slow; // Call builtin.
1829 Label not_smis, both_loaded_as_doubles;
1830
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001831 Label not_two_smis, smi_done;
1832 __ Or(a2, a1, a0);
1833 __ JumpIfNotSmi(a2, &not_two_smis);
1834 __ sra(a1, a1, 1);
1835 __ sra(a0, a0, 1);
1836 __ Ret(USE_DELAY_SLOT);
1837 __ subu(v0, a1, a0);
1838 __ bind(&not_two_smis);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001839
1840 // NOTICE! This code is only reached after a smi-fast-case check, so
1841 // it is certain that at least one operand isn't a smi.
1842
1843 // Handle the case where the objects are identical. Either returns the answer
1844 // or goes to slow. Only falls through if the objects were not identical.
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001845 EmitIdenticalObjectComparison(masm, &slow, cc);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001846
1847 // If either is a Smi (we know that not both are), then they can only
1848 // be strictly equal if the other is a HeapNumber.
1849 STATIC_ASSERT(kSmiTag == 0);
1850 ASSERT_EQ(0, Smi::FromInt(0));
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001851 __ And(t2, lhs, Operand(rhs));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001852 __ JumpIfNotSmi(t2, &not_smis, t0);
 1853 // One operand is a smi. EmitSmiNonsmiComparison generates code that can:
 1854 // 1) Return the answer.
 1855 // 2) Go to slow.
 1856 // 3) Fall through to both_loaded_as_doubles.
 1857 // In case 3 we have found out we were dealing with a number-number
 1858 // comparison and the numbers have been loaded into f12 and f14 as doubles
 1859 // if the FPU is available, or into the GP registers (a0, a1, a2, a3)
 1860 // otherwise.
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001861 EmitSmiNonsmiComparison(masm, lhs, rhs,
1862 &both_loaded_as_doubles, &slow, strict());
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001863
1864 __ bind(&both_loaded_as_doubles);
1865 // f12, f14 are the double representations of the left hand side
1866 // and the right hand side if we have FPU. Otherwise a2, a3 represent
1867 // left hand side and a0, a1 represent right hand side.
1868
1869 Isolate* isolate = masm->isolate();
1870 if (CpuFeatures::IsSupported(FPU)) {
1871 CpuFeatures::Scope scope(FPU);
1872 Label nan;
1873 __ li(t0, Operand(LESS));
1874 __ li(t1, Operand(GREATER));
1875 __ li(t2, Operand(EQUAL));
1876
1877 // Check if either rhs or lhs is NaN.
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00001878 __ BranchF(NULL, &nan, eq, f12, f14);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001879
1880 // Check if LESS condition is satisfied. If true, move conditionally
1881 // result to v0.
1882 __ c(OLT, D, f12, f14);
mstarzinger@chromium.org3233d2f2012-03-14 11:16:03 +00001883 __ Movt(v0, t0);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001884 // Use previous check to store conditionally to v0 oposite condition
1885 // (GREATER). If rhs is equal to lhs, this will be corrected in next
1886 // check.
mstarzinger@chromium.org3233d2f2012-03-14 11:16:03 +00001887 __ Movf(v0, t1);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001888 // Check if EQUAL condition is satisfied. If true, move conditionally
1889 // result to v0.
1890 __ c(EQ, D, f12, f14);
mstarzinger@chromium.org3233d2f2012-03-14 11:16:03 +00001891 __ Movt(v0, t2);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001892
1893 __ Ret();
1894
1895 __ bind(&nan);
1896 // NaN comparisons always fail.
1897 // Load whatever we need in v0 to make the comparison fail.
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001898 if (cc == lt || cc == le) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001899 __ li(v0, Operand(GREATER));
1900 } else {
1901 __ li(v0, Operand(LESS));
1902 }
1903 __ Ret();
1904 } else {
1905 // Checks for NaN in the doubles we have loaded. Can return the answer or
1906 // fall through if neither is a NaN. Also binds rhs_not_nan.
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001907 EmitNanCheck(masm, cc);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001908
1909 // Compares two doubles that are not NaNs. Returns the answer.
1910 // Never falls through.
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001911 EmitTwoNonNanDoubleComparison(masm, cc);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001912 }
1913
1914 __ bind(&not_smis);
1915 // At this point we know we are dealing with two different objects,
1916 // and neither of them is a Smi. The objects are in lhs_ and rhs_.
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001917 if (strict()) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001918 // This returns non-equal for some object types, or falls through if it
1919 // was not lucky.
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001920 EmitStrictTwoHeapObjectCompare(masm, lhs, rhs);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001921 }
1922
1923 Label check_for_symbols;
1924 Label flat_string_check;
1925 // Check for heap-number-heap-number comparison. Can jump to slow case,
1926 // or load both doubles and jump to the code that handles
1927 // that case. If the inputs are not doubles then jumps to check_for_symbols.
1928 // In this case a2 will contain the type of lhs_.
1929 EmitCheckForTwoHeapNumbers(masm,
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001930 lhs,
1931 rhs,
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001932 &both_loaded_as_doubles,
1933 &check_for_symbols,
1934 &flat_string_check);
1935
1936 __ bind(&check_for_symbols);
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001937 if (cc == eq && !strict()) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001938 // Returns an answer for two symbols or two detectable objects.
1939 // Otherwise jumps to string case or not both strings case.
1940 // Assumes that a2 is the type of lhs_ on entry.
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001941 EmitCheckForSymbolsOrObjects(masm, lhs, rhs, &flat_string_check, &slow);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001942 }
1943
1944 // Check for both being sequential ASCII strings, and inline if that is the
1945 // case.
1946 __ bind(&flat_string_check);
1947
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001948 __ JumpIfNonSmisNotBothSequentialAsciiStrings(lhs, rhs, a2, a3, &slow);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001949
1950 __ IncrementCounter(isolate->counters()->string_compare_native(), 1, a2, a3);
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001951 if (cc == eq) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001952 StringCompareStub::GenerateFlatAsciiStringEquals(masm,
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001953 lhs,
1954 rhs,
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001955 a2,
1956 a3,
1957 t0);
1958 } else {
1959 StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001960 lhs,
1961 rhs,
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001962 a2,
1963 a3,
1964 t0,
1965 t1);
1966 }
1967 // Never falls through to here.
1968
1969 __ bind(&slow);
 1970 // Prepare for call to builtin. Push object pointers: lhs (a1) first,
 1971 // rhs (a0) second.
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001972 __ Push(lhs, rhs);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001973 // Figure out which native to call and setup the arguments.
1974 Builtins::JavaScript native;
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001975 if (cc == eq) {
1976 native = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001977 } else {
1978 native = Builtins::COMPARE;
1979 int ncr; // NaN compare result.
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001980 if (cc == lt || cc == le) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001981 ncr = GREATER;
1982 } else {
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001983 ASSERT(cc == gt || cc == ge); // Remaining cases.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00001984 ncr = LESS;
1985 }
1986 __ li(a0, Operand(Smi::FromInt(ncr)));
1987 __ push(a0);
1988 }
1989
1990 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
1991 // tagged as a small integer.
1992 __ InvokeBuiltin(native, JUMP_FUNCTION);
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00001993
1994 __ bind(&miss);
1995 GenerateMiss(masm);
lrn@chromium.org7516f052011-03-30 08:52:27 +00001996}
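// Illustrative sketch (not part of the stub above): the two-smi fast path at
// the top of ICCompareStub::GenerateGeneric. Both operands are untagged with
// an arithmetic shift and subtracted; the sign of the difference encodes the
// comparison result (negative: lhs < rhs, zero: equal, positive: lhs > rhs).
// Standalone sketch kept out of the build with #if 0; the helper name is made
// up for illustration.
#if 0
#include <cassert>
#include <cstdint>

static int32_t CompareSmis(int32_t lhs_smi, int32_t rhs_smi) {
  int32_t lhs = lhs_smi >> 1;  // Untag (kSmiTagSize == 1 assumed).
  int32_t rhs = rhs_smi >> 1;
  return lhs - rhs;            // Sign of the result is the comparison outcome.
}

int main() {
  // Tagged smis are the integer values shifted left by one.
  assert(CompareSmis(3 << 1, 7 << 1) < 0);
  assert(CompareSmis(7 << 1, 7 << 1) == 0);
  assert(CompareSmis(9 << 1, 7 << 1) > 0);
  return 0;
}
#endif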
1997
1998
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00001999// The stub expects its argument in the tos_ register and returns its result in
2000// it, too: zero for false, and a non-zero value for true.
lrn@chromium.org7516f052011-03-30 08:52:27 +00002001void ToBooleanStub::Generate(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002002 // This stub uses FPU instructions.
danno@chromium.org40cb8782011-05-25 07:58:50 +00002003 CpuFeatures::Scope scope(FPU);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002004
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00002005 Label patch;
2006 const Register map = t5.is(tos_) ? t3 : t5;
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002007
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00002008 // undefined -> false.
2009 CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false);
danno@chromium.org40cb8782011-05-25 07:58:50 +00002010
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00002011 // Boolean -> its value.
2012 CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false);
2013 CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true);
danno@chromium.org40cb8782011-05-25 07:58:50 +00002014
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00002015 // 'null' -> false.
2016 CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false);
danno@chromium.org40cb8782011-05-25 07:58:50 +00002017
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00002018 if (types_.Contains(SMI)) {
2019 // Smis: 0 -> false, all other -> true
2020 __ And(at, tos_, kSmiTagMask);
2021 // tos_ contains the correct return value already
2022 __ Ret(eq, at, Operand(zero_reg));
2023 } else if (types_.NeedsMap()) {
2024 // If we need a map later and have a Smi -> patch.
2025 __ JumpIfSmi(tos_, &patch);
2026 }
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002027
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00002028 if (types_.NeedsMap()) {
2029 __ lw(map, FieldMemOperand(tos_, HeapObject::kMapOffset));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002030
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00002031 if (types_.CanBeUndetectable()) {
2032 __ lbu(at, FieldMemOperand(map, Map::kBitFieldOffset));
2033 __ And(at, at, Operand(1 << Map::kIsUndetectable));
2034 // Undetectable -> false.
mstarzinger@chromium.org3233d2f2012-03-14 11:16:03 +00002035 __ Movn(tos_, zero_reg, at);
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00002036 __ Ret(ne, at, Operand(zero_reg));
2037 }
2038 }
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002039
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00002040 if (types_.Contains(SPEC_OBJECT)) {
2041 // Spec object -> true.
2042 __ lbu(at, FieldMemOperand(map, Map::kInstanceTypeOffset));
2043 // tos_ contains the correct non-zero return value already.
2044 __ Ret(ge, at, Operand(FIRST_SPEC_OBJECT_TYPE));
2045 }
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002046
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00002047 if (types_.Contains(STRING)) {
2048 // String value -> false iff empty.
2049 __ lbu(at, FieldMemOperand(map, Map::kInstanceTypeOffset));
2050 Label skip;
2051 __ Branch(&skip, ge, at, Operand(FIRST_NONSTRING_TYPE));
ulan@chromium.org6ff65142012-03-21 09:52:17 +00002052 __ Ret(USE_DELAY_SLOT); // the string length is OK as the return value
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00002053 __ lw(tos_, FieldMemOperand(tos_, String::kLengthOffset));
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00002054 __ bind(&skip);
2055 }
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002056
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00002057 if (types_.Contains(HEAP_NUMBER)) {
2058 // Heap number -> false iff +0, -0, or NaN.
2059 Label not_heap_number;
2060 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
2061 __ Branch(&not_heap_number, ne, map, Operand(at));
2062 Label zero_or_nan, number;
2063 __ ldc1(f2, FieldMemOperand(tos_, HeapNumber::kValueOffset));
2064 __ BranchF(&number, &zero_or_nan, ne, f2, kDoubleRegZero);
2065 // "tos_" is a register, and contains a non zero value by default.
2066 // Hence we only need to overwrite "tos_" with zero to return false for
2067 // FP_ZERO or FP_NAN cases. Otherwise, by default it returns true.
2068 __ bind(&zero_or_nan);
2069 __ mov(tos_, zero_reg);
2070 __ bind(&number);
2071 __ Ret();
2072 __ bind(&not_heap_number);
2073 }
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002074
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00002075 __ bind(&patch);
2076 GenerateTypeTransition(masm);
2077}
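// Illustrative sketch (not part of the stub above): the heap-number clause of
// ToBooleanStub::Generate. A number converts to false exactly when it is +0,
// -0 or NaN; every other value (including denormals and infinities) converts
// to true. Standalone sketch, kept out of the build with #if 0.
#if 0
#include <cassert>
#include <cmath>
#include <limits>

static bool NumberToBoolean(double value) {
  return value != 0.0 && !std::isnan(value);  // +0, -0 and NaN are false.
}

int main() {
  assert(!NumberToBoolean(0.0));
  assert(!NumberToBoolean(-0.0));
  assert(!NumberToBoolean(std::numeric_limits<double>::quiet_NaN()));
  assert(NumberToBoolean(1.5));
  assert(NumberToBoolean(std::numeric_limits<double>::infinity()));
  return 0;
}
#endif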
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002078
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002079
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00002080void ToBooleanStub::CheckOddball(MacroAssembler* masm,
2081 Type type,
2082 Heap::RootListIndex value,
2083 bool result) {
2084 if (types_.Contains(type)) {
2085 // If we see an expected oddball, return its ToBoolean value tos_.
2086 __ LoadRoot(at, value);
2087 __ Subu(at, at, tos_); // This is a check for equality for the movz below.
2088 // The value of a root is never NULL, so we can avoid loading a non-null
2089 // value into tos_ when we want to return 'true'.
2090 if (!result) {
mstarzinger@chromium.org3233d2f2012-03-14 11:16:03 +00002091 __ Movz(tos_, zero_reg, at);
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00002092 }
2093 __ Ret(eq, at, Operand(zero_reg));
2094 }
2095}
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002096
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002097
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00002098void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) {
2099 __ Move(a3, tos_);
2100 __ li(a2, Operand(Smi::FromInt(tos_.code())));
2101 __ li(a1, Operand(Smi::FromInt(types_.ToByte())));
2102 __ Push(a3, a2, a1);
2103 // Patch the caller to an appropriate specialized stub and return the
2104 // operation result to the caller of the stub.
2105 __ TailCallExternalReference(
2106 ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()),
2107 3,
2108 1);
lrn@chromium.org7516f052011-03-30 08:52:27 +00002109}
2110
2111
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00002112void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
2113 // We don't allow a GC during a store buffer overflow so there is no need to
2114 // store the registers in any particular way, but we do have to store and
2115 // restore them.
2116 __ MultiPush(kJSCallerSaved | ra.bit());
2117 if (save_doubles_ == kSaveFPRegs) {
2118 CpuFeatures::Scope scope(FPU);
2119 __ MultiPushFPU(kCallerSavedFPU);
2120 }
2121 const int argument_count = 1;
2122 const int fp_argument_count = 0;
2123 const Register scratch = a1;
2124
2125 AllowExternalCallThatCantCauseGC scope(masm);
2126 __ PrepareCallCFunction(argument_count, fp_argument_count, scratch);
2127 __ li(a0, Operand(ExternalReference::isolate_address()));
2128 __ CallCFunction(
2129 ExternalReference::store_buffer_overflow_function(masm->isolate()),
2130 argument_count);
2131 if (save_doubles_ == kSaveFPRegs) {
2132 CpuFeatures::Scope scope(FPU);
2133 __ MultiPopFPU(kCallerSavedFPU);
2134 }
2135
2136 __ MultiPop(kJSCallerSaved | ra.bit());
2137 __ Ret();
2138}
2139
2140
whesse@chromium.org030d38e2011-07-13 13:23:34 +00002141void UnaryOpStub::PrintName(StringStream* stream) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002142 const char* op_name = Token::Name(op_);
2143 const char* overwrite_name = NULL; // Make g++ happy.
2144 switch (mode_) {
2145 case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
2146 case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
2147 }
whesse@chromium.org030d38e2011-07-13 13:23:34 +00002148 stream->Add("UnaryOpStub_%s_%s_%s",
2149 op_name,
2150 overwrite_name,
2151 UnaryOpIC::GetName(operand_type_));
karlklose@chromium.org83a47282011-05-11 11:54:09 +00002152}
2153
2154
2155// TODO(svenpanne): Use virtual functions instead of switch.
danno@chromium.org40cb8782011-05-25 07:58:50 +00002156void UnaryOpStub::Generate(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002157 switch (operand_type_) {
danno@chromium.org40cb8782011-05-25 07:58:50 +00002158 case UnaryOpIC::UNINITIALIZED:
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002159 GenerateTypeTransition(masm);
2160 break;
danno@chromium.org40cb8782011-05-25 07:58:50 +00002161 case UnaryOpIC::SMI:
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002162 GenerateSmiStub(masm);
2163 break;
danno@chromium.org40cb8782011-05-25 07:58:50 +00002164 case UnaryOpIC::HEAP_NUMBER:
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002165 GenerateHeapNumberStub(masm);
2166 break;
danno@chromium.org40cb8782011-05-25 07:58:50 +00002167 case UnaryOpIC::GENERIC:
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002168 GenerateGenericStub(masm);
2169 break;
2170 }
karlklose@chromium.org83a47282011-05-11 11:54:09 +00002171}
2172
2173
danno@chromium.org40cb8782011-05-25 07:58:50 +00002174void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002175 // Argument is in a0 and v0 at this point, so we can overwrite a0.
ricow@chromium.org4f693d62011-07-04 14:01:31 +00002176 __ li(a2, Operand(Smi::FromInt(op_)));
2177 __ li(a1, Operand(Smi::FromInt(mode_)));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002178 __ li(a0, Operand(Smi::FromInt(operand_type_)));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002179 __ Push(v0, a2, a1, a0);
2180
2181 __ TailCallExternalReference(
ricow@chromium.org4f693d62011-07-04 14:01:31 +00002182 ExternalReference(IC_Utility(IC::kUnaryOp_Patch), masm->isolate()), 4, 1);
karlklose@chromium.org83a47282011-05-11 11:54:09 +00002183}
2184
2185
2186// TODO(svenpanne): Use virtual functions instead of switch.
danno@chromium.org40cb8782011-05-25 07:58:50 +00002187void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002188 switch (op_) {
2189 case Token::SUB:
2190 GenerateSmiStubSub(masm);
2191 break;
2192 case Token::BIT_NOT:
2193 GenerateSmiStubBitNot(masm);
2194 break;
2195 default:
2196 UNREACHABLE();
2197 }
karlklose@chromium.org83a47282011-05-11 11:54:09 +00002198}
2199
2200
danno@chromium.org40cb8782011-05-25 07:58:50 +00002201void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002202 Label non_smi, slow;
2203 GenerateSmiCodeSub(masm, &non_smi, &slow);
2204 __ bind(&non_smi);
2205 __ bind(&slow);
2206 GenerateTypeTransition(masm);
karlklose@chromium.org83a47282011-05-11 11:54:09 +00002207}
2208
2209
danno@chromium.org40cb8782011-05-25 07:58:50 +00002210void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002211 Label non_smi;
2212 GenerateSmiCodeBitNot(masm, &non_smi);
2213 __ bind(&non_smi);
2214 GenerateTypeTransition(masm);
karlklose@chromium.org83a47282011-05-11 11:54:09 +00002215}
2216
2217
danno@chromium.org40cb8782011-05-25 07:58:50 +00002218void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
2219 Label* non_smi,
2220 Label* slow) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002221 __ JumpIfNotSmi(a0, non_smi);
2222
2223 // The result of negating zero or the smallest negative smi is not a smi.
2224 __ And(t0, a0, ~0x80000000);
2225 __ Branch(slow, eq, t0, Operand(zero_reg));
2226
2227 // Return '0 - value'.
ulan@chromium.org6ff65142012-03-21 09:52:17 +00002228 __ Ret(USE_DELAY_SLOT);
2229 __ subu(v0, zero_reg, a0);
karlklose@chromium.org83a47282011-05-11 11:54:09 +00002230}
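// Illustrative sketch (not part of the stub above): why GenerateSmiCodeSub can
// negate the tagged value directly, and why 0 and the smallest smi take the
// slow path. Negating a tagged smi (value << 1) produces the tag of the
// negated value, but -0 must be a heap number rather than a smi, and negating
// the smallest smi would overflow the 31-bit smi range. Standalone sketch,
// kept out of the build with #if 0.
#if 0
#include <cassert>
#include <cstdint>

int main() {
  int32_t value = 12345;
  int32_t smi = value << 1;               // Tagged smi (value fits in 30 bits).
  int32_t negated_smi = 0 - smi;          // '0 - value' on the tagged word.
  assert(negated_smi == -value * 2);      // Still a correctly tagged smi.

  // The slow-path test: a0 & ~0x80000000 (== 0x7FFFFFFF) is zero exactly for
  // smi 0 and the bit pattern 0x80000000 (tagged -2^30), whose negations are
  // not smis.
  int32_t smi_zero = 0;
  int32_t smi_min = static_cast<int32_t>(0x80000000u);
  assert((smi_zero & 0x7FFFFFFF) == 0);
  assert((smi_min & 0x7FFFFFFF) == 0);
  assert((smi & 0x7FFFFFFF) != 0);
  return 0;
}
#endif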
2231
2232
danno@chromium.org40cb8782011-05-25 07:58:50 +00002233void UnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm,
2234 Label* non_smi) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002235 __ JumpIfNotSmi(a0, non_smi);
2236
2237 // Flip bits and revert inverted smi-tag.
2238 __ Neg(v0, a0);
2239 __ And(v0, v0, ~kSmiTagMask);
2240 __ Ret();
karlklose@chromium.org83a47282011-05-11 11:54:09 +00002241}
2242
2243
2244// TODO(svenpanne): Use virtual functions instead of switch.
danno@chromium.org40cb8782011-05-25 07:58:50 +00002245void UnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002246 switch (op_) {
2247 case Token::SUB:
2248 GenerateHeapNumberStubSub(masm);
2249 break;
2250 case Token::BIT_NOT:
2251 GenerateHeapNumberStubBitNot(masm);
2252 break;
2253 default:
2254 UNREACHABLE();
2255 }
karlklose@chromium.org83a47282011-05-11 11:54:09 +00002256}
2257
2258
danno@chromium.org40cb8782011-05-25 07:58:50 +00002259void UnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) {
2260 Label non_smi, slow, call_builtin;
2261 GenerateSmiCodeSub(masm, &non_smi, &call_builtin);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002262 __ bind(&non_smi);
2263 GenerateHeapNumberCodeSub(masm, &slow);
2264 __ bind(&slow);
2265 GenerateTypeTransition(masm);
danno@chromium.org40cb8782011-05-25 07:58:50 +00002266 __ bind(&call_builtin);
2267 GenerateGenericCodeFallback(masm);
karlklose@chromium.org83a47282011-05-11 11:54:09 +00002268}
2269
2270
danno@chromium.org40cb8782011-05-25 07:58:50 +00002271void UnaryOpStub::GenerateHeapNumberStubBitNot(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002272 Label non_smi, slow;
2273 GenerateSmiCodeBitNot(masm, &non_smi);
2274 __ bind(&non_smi);
2275 GenerateHeapNumberCodeBitNot(masm, &slow);
2276 __ bind(&slow);
2277 GenerateTypeTransition(masm);
karlklose@chromium.org83a47282011-05-11 11:54:09 +00002278}
2279
erik.corry@gmail.comd6076d92011-06-06 09:39:18 +00002280
danno@chromium.org40cb8782011-05-25 07:58:50 +00002281void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
2282 Label* slow) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002283 EmitCheckForHeapNumber(masm, a0, a1, t2, slow);
2284 // a0 is a heap number. Get a new heap number in a1.
2285 if (mode_ == UNARY_OVERWRITE) {
2286 __ lw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset));
2287 __ Xor(a2, a2, Operand(HeapNumber::kSignMask)); // Flip sign.
2288 __ sw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset));
2289 } else {
2290 Label slow_allocate_heapnumber, heapnumber_allocated;
2291 __ AllocateHeapNumber(a1, a2, a3, t2, &slow_allocate_heapnumber);
2292 __ jmp(&heapnumber_allocated);
2293
2294 __ bind(&slow_allocate_heapnumber);
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00002295 {
2296 FrameScope scope(masm, StackFrame::INTERNAL);
2297 __ push(a0);
2298 __ CallRuntime(Runtime::kNumberAlloc, 0);
2299 __ mov(a1, v0);
2300 __ pop(a0);
2301 }
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002302
2303 __ bind(&heapnumber_allocated);
2304 __ lw(a3, FieldMemOperand(a0, HeapNumber::kMantissaOffset));
2305 __ lw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset));
2306 __ sw(a3, FieldMemOperand(a1, HeapNumber::kMantissaOffset));
2307 __ Xor(a2, a2, Operand(HeapNumber::kSignMask)); // Flip sign.
2308 __ sw(a2, FieldMemOperand(a1, HeapNumber::kExponentOffset));
2309 __ mov(v0, a1);
2310 }
2311 __ Ret();
karlklose@chromium.org83a47282011-05-11 11:54:09 +00002312}
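// Illustrative sketch (not part of the stub above): the core of
// GenerateHeapNumberCodeSub. Negating an IEEE-754 double only requires
// flipping the sign bit, which lives in the most significant bit of the
// exponent word, so the stub XORs that word with HeapNumber::kSignMask and
// leaves the mantissa word untouched. Standalone sketch, kept out of the
// build with #if 0.
#if 0
#include <cassert>
#include <cstdint>
#include <cstring>

int main() {
  double d = 2.75;
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof bits);
  bits ^= uint64_t{1} << 63;      // Flip the sign bit (the high word's top bit).
  double negated;
  std::memcpy(&negated, &bits, sizeof negated);
  assert(negated == -2.75);
  return 0;
}
#endif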
2313
2314
danno@chromium.org40cb8782011-05-25 07:58:50 +00002315void UnaryOpStub::GenerateHeapNumberCodeBitNot(
2316 MacroAssembler* masm,
2317 Label* slow) {
ricow@chromium.orgc54d3652011-05-30 09:20:16 +00002318 Label impossible;
2319
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002320 EmitCheckForHeapNumber(masm, a0, a1, t2, slow);
2321 // Convert the heap number in a0 to an untagged integer in a1.
2322 __ ConvertToInt32(a0, a1, a2, a3, f0, slow);
2323
2324 // Do the bitwise operation and check if the result fits in a smi.
2325 Label try_float;
2326 __ Neg(a1, a1);
2327 __ Addu(a2, a1, Operand(0x40000000));
2328 __ Branch(&try_float, lt, a2, Operand(zero_reg));
2329
2330 // Tag the result as a smi and we're done.
2331 __ SmiTag(v0, a1);
2332 __ Ret();
2333
2334 // Try to store the result in a heap number.
2335 __ bind(&try_float);
2336 if (mode_ == UNARY_NO_OVERWRITE) {
2337 Label slow_allocate_heapnumber, heapnumber_allocated;
ricow@chromium.orgc54d3652011-05-30 09:20:16 +00002338 // Allocate a new heap number without zapping v0, which we need if it fails.
2339 __ AllocateHeapNumber(a2, a3, t0, t2, &slow_allocate_heapnumber);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002340 __ jmp(&heapnumber_allocated);
2341
2342 __ bind(&slow_allocate_heapnumber);
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00002343 {
2344 FrameScope scope(masm, StackFrame::INTERNAL);
2345 __ push(v0); // Push the heap number, not the untagged int32.
2346 __ CallRuntime(Runtime::kNumberAlloc, 0);
2347 __ mov(a2, v0); // Move the new heap number into a2.
2348 // Get the heap number into v0, now that the new heap number is in a2.
2349 __ pop(v0);
2350 }
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002351
ricow@chromium.orgc54d3652011-05-30 09:20:16 +00002352 // Convert the heap number in v0 to an untagged integer in a1.
 2353 // This cannot take the slow case because it is the same number we
 2354 // already converted once before.
2355 __ ConvertToInt32(v0, a1, a3, t0, f0, &impossible);
2356 // Negate the result.
2357 __ Xor(a1, a1, -1);
2358
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002359 __ bind(&heapnumber_allocated);
ricow@chromium.orgc54d3652011-05-30 09:20:16 +00002360 __ mov(v0, a2); // Move newly allocated heap number to v0.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002361 }
2362
2363 if (CpuFeatures::IsSupported(FPU)) {
2364 // Convert the int32 in a1 to the heap number in v0. a2 is corrupted.
2365 CpuFeatures::Scope scope(FPU);
2366 __ mtc1(a1, f0);
2367 __ cvt_d_w(f0, f0);
2368 __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
2369 __ Ret();
2370 } else {
2371 // WriteInt32ToHeapNumberStub does not trigger GC, so we do not
2372 // have to set up a frame.
2373 WriteInt32ToHeapNumberStub stub(a1, v0, a2, a3);
2374 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
2375 }
ricow@chromium.orgc54d3652011-05-30 09:20:16 +00002376
2377 __ bind(&impossible);
2378 if (FLAG_debug_code) {
2379 __ stop("Incorrect assumption in bit-not stub");
2380 }
karlklose@chromium.org83a47282011-05-11 11:54:09 +00002381}
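// Illustrative sketch (not part of the stub above): the "does the int32 fit in
// a smi" test used in GenerateHeapNumberCodeBitNot. Adding 0x40000000 (2^30)
// maps every value in the smi range [-2^30, 2^30 - 1] to a result with the
// high bit clear, while every out-of-range value wraps around to one with the
// high bit set, so a single sign test decides between tagging a smi and
// storing a heap number. Standalone sketch, kept out of the build with #if 0.
#if 0
#include <cassert>
#include <cstdint>

static bool FitsInSmi(int32_t value) {
  uint32_t shifted = static_cast<uint32_t>(value) + 0x40000000u;
  return shifted <= 0x7FFFFFFFu;  // High bit set means out of smi range.
}

int main() {
  assert(FitsInSmi(0));
  assert(FitsInSmi((1 << 30) - 1));     // Largest smi.
  assert(FitsInSmi(-(1 << 30)));        // Smallest smi.
  assert(!FitsInSmi(1 << 30));          // One past the largest smi.
  assert(!FitsInSmi(-(1 << 30) - 1));   // One past the smallest smi.
  return 0;
}
#endif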
2382
2383
2384// TODO(svenpanne): Use virtual functions instead of switch.
danno@chromium.org40cb8782011-05-25 07:58:50 +00002385void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002386 switch (op_) {
2387 case Token::SUB:
2388 GenerateGenericStubSub(masm);
2389 break;
2390 case Token::BIT_NOT:
2391 GenerateGenericStubBitNot(masm);
2392 break;
2393 default:
2394 UNREACHABLE();
2395 }
karlklose@chromium.org83a47282011-05-11 11:54:09 +00002396}
2397
2398
danno@chromium.org40cb8782011-05-25 07:58:50 +00002399void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002400 Label non_smi, slow;
2401 GenerateSmiCodeSub(masm, &non_smi, &slow);
2402 __ bind(&non_smi);
2403 GenerateHeapNumberCodeSub(masm, &slow);
2404 __ bind(&slow);
2405 GenerateGenericCodeFallback(masm);
karlklose@chromium.org83a47282011-05-11 11:54:09 +00002406}
2407
2408
danno@chromium.org40cb8782011-05-25 07:58:50 +00002409void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002410 Label non_smi, slow;
2411 GenerateSmiCodeBitNot(masm, &non_smi);
2412 __ bind(&non_smi);
2413 GenerateHeapNumberCodeBitNot(masm, &slow);
2414 __ bind(&slow);
2415 GenerateGenericCodeFallback(masm);
karlklose@chromium.org83a47282011-05-11 11:54:09 +00002416}
2417
2418
danno@chromium.org40cb8782011-05-25 07:58:50 +00002419void UnaryOpStub::GenerateGenericCodeFallback(
karlklose@chromium.org83a47282011-05-11 11:54:09 +00002420 MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002421 // Handle the slow case by jumping to the JavaScript builtin.
2422 __ push(a0);
2423 switch (op_) {
2424 case Token::SUB:
2425 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
2426 break;
2427 case Token::BIT_NOT:
2428 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
2429 break;
2430 default:
2431 UNREACHABLE();
2432 }
karlklose@chromium.org83a47282011-05-11 11:54:09 +00002433}
2434
2435
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00002436void BinaryOpStub::Initialize() {
2437 platform_specific_bit_ = CpuFeatures::IsSupported(FPU);
2438}
2439
2440
danno@chromium.org40cb8782011-05-25 07:58:50 +00002441void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002442 Label get_result;
2443
2444 __ Push(a1, a0);
2445
2446 __ li(a2, Operand(Smi::FromInt(MinorKey())));
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00002447 __ push(a2);
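  // The stack now holds the left operand (a1), the right operand (a0) and the
  // stub's MinorKey as a smi; these are the three arguments consumed by the
  // kBinaryOp_Patch IC utility called below.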
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002448
2449 __ TailCallExternalReference(
danno@chromium.org40cb8782011-05-25 07:58:50 +00002450 ExternalReference(IC_Utility(IC::kBinaryOp_Patch),
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002451 masm->isolate()),
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00002452 3,
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002453 1);
lrn@chromium.org7516f052011-03-30 08:52:27 +00002454}
2455
2456
danno@chromium.org40cb8782011-05-25 07:58:50 +00002457void BinaryOpStub::GenerateTypeTransitionWithSavedArgs(
lrn@chromium.org7516f052011-03-30 08:52:27 +00002458 MacroAssembler* masm) {
2459 UNIMPLEMENTED();
2460}
2461
2462
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00002463void BinaryOpStub_GenerateSmiSmiOperation(MacroAssembler* masm,
2464 Token::Value op) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002465 Register left = a1;
2466 Register right = a0;
2467
2468 Register scratch1 = t0;
2469 Register scratch2 = t1;
2470
2471 ASSERT(right.is(a0));
2472 STATIC_ASSERT(kSmiTag == 0);
2473
2474 Label not_smi_result;
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00002475 switch (op) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002476 case Token::ADD:
2477 __ AdduAndCheckForOverflow(v0, left, right, scratch1);
2478 __ RetOnNoOverflow(scratch1);
2479 // No need to revert anything - right and left are intact.
2480 break;
2481 case Token::SUB:
2482 __ SubuAndCheckForOverflow(v0, left, right, scratch1);
2483 __ RetOnNoOverflow(scratch1);
2484 // No need to revert anything - right and left are intact.
2485 break;
2486 case Token::MUL: {
2487 // Remove tag from one of the operands. This way the multiplication result
2488 // will be a smi if it fits the smi range.
2489 __ SmiUntag(scratch1, right);
2490 // Do multiplication.
2491 // lo = lower 32 bits of scratch1 * left.
2492 // hi = higher 32 bits of scratch1 * left.
2493 __ Mult(left, scratch1);
2494 // Check for overflowing the smi range - no overflow if higher 33 bits of
2495 // the result are identical.
2496 __ mflo(scratch1);
2497 __ mfhi(scratch2);
2498 __ sra(scratch1, scratch1, 31);
2499 __ Branch(&not_smi_result, ne, scratch1, Operand(scratch2));
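      // scratch1 now holds the sign-extension of the low word (lo >> 31) and
      // scratch2 the high word; they are equal exactly when the 64-bit product
      // fits into a signed 32-bit value.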
2500 // Go slow on zero result to handle -0.
2501 __ mflo(v0);
2502 __ Ret(ne, v0, Operand(zero_reg));
2503 // We need -0 if we were multiplying a negative number with 0 to get 0.
2504 // We know one of them was zero.
2505 __ Addu(scratch2, right, left);
2506 Label skip;
2507 // ARM uses the 'pl' condition, which is 'ge'.
2508 // Negating it results in 'lt'.
2509 __ Branch(&skip, lt, scratch2, Operand(zero_reg));
2510 ASSERT(Smi::FromInt(0) == 0);
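      // Ret(USE_DELAY_SLOT) emits the return jump but leaves its branch delay
      // slot open, so the following mov still executes before the return takes
      // effect and v0 is set on exit.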
ulan@chromium.org6ff65142012-03-21 09:52:17 +00002511 __ Ret(USE_DELAY_SLOT);
2512 __ mov(v0, zero_reg); // Return smi 0 if the non-zero one was positive.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002513 __ bind(&skip);
2514 // We fall through here if we multiplied a negative number with 0, because
2515 // that would mean we should produce -0.
2516 }
2517 break;
2518 case Token::DIV: {
2519 Label done;
2520 __ SmiUntag(scratch2, right);
2521 __ SmiUntag(scratch1, left);
2522 __ Div(scratch1, scratch2);
2523 // A minor optimization: div may be calculated asynchronously, so we check
2524 // for division by zero before getting the result.
2525 __ Branch(&not_smi_result, eq, scratch2, Operand(zero_reg));
 2526      // If the result is 0, we need to make sure the divisor (right) is
2527 // positive, otherwise it is a -0 case.
2528 // Quotient is in 'lo', remainder is in 'hi'.
2529 // Check for no remainder first.
2530 __ mfhi(scratch1);
2531 __ Branch(&not_smi_result, ne, scratch1, Operand(zero_reg));
2532 __ mflo(scratch1);
2533 __ Branch(&done, ne, scratch1, Operand(zero_reg));
2534 __ Branch(&not_smi_result, lt, scratch2, Operand(zero_reg));
2535 __ bind(&done);
2536 // Check that the signed result fits in a Smi.
2537 __ Addu(scratch2, scratch1, Operand(0x40000000));
2538 __ Branch(&not_smi_result, lt, scratch2, Operand(zero_reg));
2539 __ SmiTag(v0, scratch1);
2540 __ Ret();
2541 }
2542 break;
2543 case Token::MOD: {
2544 Label done;
2545 __ SmiUntag(scratch2, right);
2546 __ SmiUntag(scratch1, left);
2547 __ Div(scratch1, scratch2);
2548 // A minor optimization: div may be calculated asynchronously, so we check
2549 // for division by 0 before calling mfhi.
2550 // Check for zero on the right hand side.
2551 __ Branch(&not_smi_result, eq, scratch2, Operand(zero_reg));
2552 // If the result is 0, we need to make sure the dividend (left) is
2553 // positive (or 0), otherwise it is a -0 case.
2554 // Remainder is in 'hi'.
2555 __ mfhi(scratch2);
2556 __ Branch(&done, ne, scratch2, Operand(zero_reg));
2557 __ Branch(&not_smi_result, lt, scratch1, Operand(zero_reg));
2558 __ bind(&done);
2559 // Check that the signed result fits in a Smi.
2560 __ Addu(scratch1, scratch2, Operand(0x40000000));
2561 __ Branch(&not_smi_result, lt, scratch1, Operand(zero_reg));
2562 __ SmiTag(v0, scratch2);
2563 __ Ret();
2564 }
2565 break;
2566 case Token::BIT_OR:
ulan@chromium.org6ff65142012-03-21 09:52:17 +00002567 __ Ret(USE_DELAY_SLOT);
2568 __ or_(v0, left, right);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002569 break;
2570 case Token::BIT_AND:
ulan@chromium.org6ff65142012-03-21 09:52:17 +00002571 __ Ret(USE_DELAY_SLOT);
2572 __ and_(v0, left, right);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002573 break;
2574 case Token::BIT_XOR:
ulan@chromium.org6ff65142012-03-21 09:52:17 +00002575 __ Ret(USE_DELAY_SLOT);
2576 __ xor_(v0, left, right);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002577 break;
2578 case Token::SAR:
2579 // Remove tags from right operand.
2580 __ GetLeastBitsFromSmi(scratch1, right, 5);
2581 __ srav(scratch1, left, scratch1);
2582 // Smi tag result.
ulan@chromium.org6ff65142012-03-21 09:52:17 +00002583 __ And(v0, scratch1, ~kSmiTagMask);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002584 __ Ret();
2585 break;
2586 case Token::SHR:
2587 // Remove tags from operands. We can't do this on a 31 bit number
2588 // because then the 0s get shifted into bit 30 instead of bit 31.
2589 __ SmiUntag(scratch1, left);
2590 __ GetLeastBitsFromSmi(scratch2, right, 5);
2591 __ srlv(v0, scratch1, scratch2);
2592 // Unsigned shift is not allowed to produce a negative number, so
2593 // check the sign bit and the sign bit after Smi tagging.
2594 __ And(scratch1, v0, Operand(0xc0000000));
2595 __ Branch(&not_smi_result, ne, scratch1, Operand(zero_reg));
2596 // Smi tag result.
2597 __ SmiTag(v0);
2598 __ Ret();
2599 break;
2600 case Token::SHL:
2601 // Remove tags from operands.
2602 __ SmiUntag(scratch1, left);
2603 __ GetLeastBitsFromSmi(scratch2, right, 5);
2604 __ sllv(scratch1, scratch1, scratch2);
2605 // Check that the signed result fits in a Smi.
2606 __ Addu(scratch2, scratch1, Operand(0x40000000));
2607 __ Branch(&not_smi_result, lt, scratch2, Operand(zero_reg));
2608 __ SmiTag(v0, scratch1);
2609 __ Ret();
2610 break;
2611 default:
2612 UNREACHABLE();
2613 }
2614 __ bind(&not_smi_result);
lrn@chromium.org7516f052011-03-30 08:52:27 +00002615}
2616
2617
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00002618void BinaryOpStub_GenerateHeapResultAllocation(MacroAssembler* masm,
2619 Register result,
2620 Register heap_number_map,
2621 Register scratch1,
2622 Register scratch2,
2623 Label* gc_required,
2624 OverwriteMode mode);
2625
2626
2627void BinaryOpStub_GenerateFPOperation(MacroAssembler* masm,
2628 BinaryOpIC::TypeInfo left_type,
2629 BinaryOpIC::TypeInfo right_type,
2630 bool smi_operands,
2631 Label* not_numbers,
2632 Label* gc_required,
2633 Label* miss,
2634 Token::Value op,
2635 OverwriteMode mode) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002636 Register left = a1;
2637 Register right = a0;
2638 Register scratch1 = t3;
2639 Register scratch2 = t5;
2640 Register scratch3 = t0;
2641
2642 ASSERT(smi_operands || (not_numbers != NULL));
svenpanne@chromium.orgc859c4f2012-10-15 11:51:39 +00002643 if (smi_operands) {
2644 __ AssertSmi(left);
2645 __ AssertSmi(right);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002646 }
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00002647 if (left_type == BinaryOpIC::SMI) {
2648 __ JumpIfNotSmi(left, miss);
2649 }
2650 if (right_type == BinaryOpIC::SMI) {
2651 __ JumpIfNotSmi(right, miss);
2652 }
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002653
2654 Register heap_number_map = t2;
2655 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
2656
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00002657 switch (op) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002658 case Token::ADD:
2659 case Token::SUB:
2660 case Token::MUL:
2661 case Token::DIV:
2662 case Token::MOD: {
2663 // Load left and right operands into f12 and f14 or a0/a1 and a2/a3
2664 // depending on whether FPU is available or not.
2665 FloatingPointHelper::Destination destination =
2666 CpuFeatures::IsSupported(FPU) &&
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00002667 op != Token::MOD ?
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002668 FloatingPointHelper::kFPURegisters :
2669 FloatingPointHelper::kCoreRegisters;
2670
2671 // Allocate new heap number for result.
2672 Register result = s0;
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00002673 BinaryOpStub_GenerateHeapResultAllocation(
2674 masm, result, heap_number_map, scratch1, scratch2, gc_required, mode);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002675
2676 // Load the operands.
2677 if (smi_operands) {
2678 FloatingPointHelper::LoadSmis(masm, destination, scratch1, scratch2);
2679 } else {
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00002680 // Load right operand to f14 or a2/a3.
2681 if (right_type == BinaryOpIC::INT32) {
2682 FloatingPointHelper::LoadNumberAsInt32Double(
2683 masm, right, destination, f14, f16, a2, a3, heap_number_map,
2684 scratch1, scratch2, f2, miss);
2685 } else {
2686 Label* fail = (right_type == BinaryOpIC::HEAP_NUMBER) ? miss
2687 : not_numbers;
2688 FloatingPointHelper::LoadNumber(
2689 masm, destination, right, f14, a2, a3, heap_number_map,
2690 scratch1, scratch2, fail);
2691 }
2692 // Load left operand to f12 or a0/a1. This keeps a0/a1 intact if it
2693 // jumps to |miss|.
2694 if (left_type == BinaryOpIC::INT32) {
2695 FloatingPointHelper::LoadNumberAsInt32Double(
2696 masm, left, destination, f12, f16, a0, a1, heap_number_map,
2697 scratch1, scratch2, f2, miss);
2698 } else {
2699 Label* fail = (left_type == BinaryOpIC::HEAP_NUMBER) ? miss
2700 : not_numbers;
2701 FloatingPointHelper::LoadNumber(
2702 masm, destination, left, f12, a0, a1, heap_number_map,
2703 scratch1, scratch2, fail);
2704 }
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002705 }
2706
2707 // Calculate the result.
2708 if (destination == FloatingPointHelper::kFPURegisters) {
2709 // Using FPU registers:
2710 // f12: Left value.
2711 // f14: Right value.
2712 CpuFeatures::Scope scope(FPU);
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00002713 switch (op) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002714 case Token::ADD:
2715 __ add_d(f10, f12, f14);
2716 break;
2717 case Token::SUB:
2718 __ sub_d(f10, f12, f14);
2719 break;
2720 case Token::MUL:
2721 __ mul_d(f10, f12, f14);
2722 break;
2723 case Token::DIV:
2724 __ div_d(f10, f12, f14);
2725 break;
2726 default:
2727 UNREACHABLE();
2728 }
2729
2730 // ARM uses a workaround here because of the unaligned HeapNumber
2731 // kValueOffset. On MIPS this workaround is built into sdc1 so
2732 // there's no point in generating even more instructions.
2733 __ sdc1(f10, FieldMemOperand(result, HeapNumber::kValueOffset));
ulan@chromium.org6ff65142012-03-21 09:52:17 +00002734 __ Ret(USE_DELAY_SLOT);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002735 __ mov(v0, result);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002736 } else {
2737 // Call the C function to handle the double operation.
2738 FloatingPointHelper::CallCCodeForDoubleOperation(masm,
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00002739 op,
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002740 result,
2741 scratch1);
2742 if (FLAG_debug_code) {
2743 __ stop("Unreachable code.");
2744 }
2745 }
2746 break;
2747 }
2748 case Token::BIT_OR:
2749 case Token::BIT_XOR:
2750 case Token::BIT_AND:
2751 case Token::SAR:
2752 case Token::SHR:
2753 case Token::SHL: {
2754 if (smi_operands) {
2755 __ SmiUntag(a3, left);
2756 __ SmiUntag(a2, right);
2757 } else {
2758 // Convert operands to 32-bit integers. Right in a2 and left in a3.
2759 FloatingPointHelper::ConvertNumberToInt32(masm,
2760 left,
2761 a3,
2762 heap_number_map,
2763 scratch1,
2764 scratch2,
2765 scratch3,
2766 f0,
2767 not_numbers);
2768 FloatingPointHelper::ConvertNumberToInt32(masm,
2769 right,
2770 a2,
2771 heap_number_map,
2772 scratch1,
2773 scratch2,
2774 scratch3,
2775 f0,
2776 not_numbers);
2777 }
2778 Label result_not_a_smi;
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00002779 switch (op) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002780 case Token::BIT_OR:
2781 __ Or(a2, a3, Operand(a2));
2782 break;
2783 case Token::BIT_XOR:
2784 __ Xor(a2, a3, Operand(a2));
2785 break;
2786 case Token::BIT_AND:
2787 __ And(a2, a3, Operand(a2));
2788 break;
2789 case Token::SAR:
2790 // Use only the 5 least significant bits of the shift count.
2791 __ GetLeastBitsFromInt32(a2, a2, 5);
2792 __ srav(a2, a3, a2);
2793 break;
2794 case Token::SHR:
2795 // Use only the 5 least significant bits of the shift count.
2796 __ GetLeastBitsFromInt32(a2, a2, 5);
2797 __ srlv(a2, a3, a2);
2798 // SHR is special because it is required to produce a positive answer.
2799 // The code below for writing into heap numbers isn't capable of
2800 // writing the register as an unsigned int so we go to slow case if we
2801 // hit this case.
2802 if (CpuFeatures::IsSupported(FPU)) {
2803 __ Branch(&result_not_a_smi, lt, a2, Operand(zero_reg));
2804 } else {
2805 __ Branch(not_numbers, lt, a2, Operand(zero_reg));
2806 }
2807 break;
2808 case Token::SHL:
2809 // Use only the 5 least significant bits of the shift count.
2810 __ GetLeastBitsFromInt32(a2, a2, 5);
2811 __ sllv(a2, a3, a2);
2812 break;
2813 default:
2814 UNREACHABLE();
2815 }
2816 // Check that the *signed* result fits in a smi.
2817 __ Addu(a3, a2, Operand(0x40000000));
2818 __ Branch(&result_not_a_smi, lt, a3, Operand(zero_reg));
2819 __ SmiTag(v0, a2);
2820 __ Ret();
2821
2822 // Allocate new heap number for result.
2823 __ bind(&result_not_a_smi);
2824 Register result = t1;
2825 if (smi_operands) {
2826 __ AllocateHeapNumber(
2827 result, scratch1, scratch2, heap_number_map, gc_required);
2828 } else {
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00002829 BinaryOpStub_GenerateHeapResultAllocation(
2830 masm, result, heap_number_map, scratch1, scratch2, gc_required,
2831 mode);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002832 }
2833
2834 // a2: Answer as signed int32.
2835 // t1: Heap number to write answer into.
2836
2837 // Nothing can go wrong now, so move the heap number to v0, which is the
2838 // result.
2839 __ mov(v0, t1);
2840
2841 if (CpuFeatures::IsSupported(FPU)) {
 2842        // Convert the int32 in a2 to the heap number in v0. As
2843 // mentioned above SHR needs to always produce a positive result.
2844 CpuFeatures::Scope scope(FPU);
2845 __ mtc1(a2, f0);
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00002846 if (op == Token::SHR) {
ricow@chromium.org4668a2c2011-08-29 10:41:00 +00002847 __ Cvt_d_uw(f0, f0, f22);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002848 } else {
2849 __ cvt_d_w(f0, f0);
2850 }
2851 // ARM uses a workaround here because of the unaligned HeapNumber
2852 // kValueOffset. On MIPS this workaround is built into sdc1 so
2853 // there's no point in generating even more instructions.
2854 __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
2855 __ Ret();
2856 } else {
2857 // Tail call that writes the int32 in a2 to the heap number in v0, using
2858 // a3 and a0 as scratch. v0 is preserved and returned.
2859 WriteInt32ToHeapNumberStub stub(a2, v0, a3, a0);
2860 __ TailCallStub(&stub);
2861 }
2862 break;
2863 }
2864 default:
2865 UNREACHABLE();
2866 }
lrn@chromium.org7516f052011-03-30 08:52:27 +00002867}
2868
2869
 2870 // Generate the smi code. If the operation on smis is successful a return is
 2871 // generated. If the result is not a smi and heap number allocation is not
 2872 // requested, the code falls through. If number allocation is requested but a
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00002873 // heap number cannot be allocated, the code jumps to the label gc_required.
2874void BinaryOpStub_GenerateSmiCode(
danno@chromium.org40cb8782011-05-25 07:58:50 +00002875 MacroAssembler* masm,
karlklose@chromium.org83a47282011-05-11 11:54:09 +00002876 Label* use_runtime,
lrn@chromium.org7516f052011-03-30 08:52:27 +00002877 Label* gc_required,
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00002878 Token::Value op,
2879 BinaryOpStub::SmiCodeGenerateHeapNumberResults allow_heapnumber_results,
2880 OverwriteMode mode) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002881 Label not_smis;
2882
2883 Register left = a1;
2884 Register right = a0;
2885 Register scratch1 = t3;
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002886
2887 // Perform combined smi check on both operands.
2888 __ Or(scratch1, left, Operand(right));
2889 STATIC_ASSERT(kSmiTag == 0);
2890 __ JumpIfNotSmi(scratch1, &not_smis);
2891
 2892  // If the smi-smi operation results in a smi, a return is generated.
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00002893 BinaryOpStub_GenerateSmiSmiOperation(masm, op);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002894
2895 // If heap number results are possible generate the result in an allocated
2896 // heap number.
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00002897 if (allow_heapnumber_results == BinaryOpStub::ALLOW_HEAPNUMBER_RESULTS) {
2898 BinaryOpStub_GenerateFPOperation(
2899 masm, BinaryOpIC::UNINITIALIZED, BinaryOpIC::UNINITIALIZED, true,
2900 use_runtime, gc_required, &not_smis, op, mode);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002901 }
2902 __ bind(&not_smis);
lrn@chromium.org7516f052011-03-30 08:52:27 +00002903}
2904
2905
danno@chromium.org40cb8782011-05-25 07:58:50 +00002906void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002907 Label not_smis, call_runtime;
2908
danno@chromium.org40cb8782011-05-25 07:58:50 +00002909 if (result_type_ == BinaryOpIC::UNINITIALIZED ||
2910 result_type_ == BinaryOpIC::SMI) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002911 // Only allow smi results.
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00002912 BinaryOpStub_GenerateSmiCode(
2913 masm, &call_runtime, NULL, op_, NO_HEAPNUMBER_RESULTS, mode_);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002914 } else {
2915 // Allow heap number result and don't make a transition if a heap number
2916 // cannot be allocated.
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00002917 BinaryOpStub_GenerateSmiCode(
2918 masm, &call_runtime, &call_runtime, op_, ALLOW_HEAPNUMBER_RESULTS,
2919 mode_);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002920 }
2921
2922 // Code falls through if the result is not returned as either a smi or heap
2923 // number.
2924 GenerateTypeTransition(masm);
2925
2926 __ bind(&call_runtime);
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00002927 GenerateRegisterArgsPush(masm);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002928 GenerateCallRuntime(masm);
lrn@chromium.org7516f052011-03-30 08:52:27 +00002929}
2930
2931
danno@chromium.org40cb8782011-05-25 07:58:50 +00002932void BinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002933 Label call_runtime;
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00002934 ASSERT(left_type_ == BinaryOpIC::STRING && right_type_ == BinaryOpIC::STRING);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002935 ASSERT(op_ == Token::ADD);
2936 // If both arguments are strings, call the string add stub.
2937 // Otherwise, do a transition.
2938
2939 // Registers containing left and right operands respectively.
2940 Register left = a1;
2941 Register right = a0;
2942
2943 // Test if left operand is a string.
2944 __ JumpIfSmi(left, &call_runtime);
2945 __ GetObjectType(left, a2, a2);
2946 __ Branch(&call_runtime, ge, a2, Operand(FIRST_NONSTRING_TYPE));
2947
2948 // Test if right operand is a string.
2949 __ JumpIfSmi(right, &call_runtime);
2950 __ GetObjectType(right, a2, a2);
2951 __ Branch(&call_runtime, ge, a2, Operand(FIRST_NONSTRING_TYPE));
2952
2953 StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
2954 GenerateRegisterArgsPush(masm);
2955 __ TailCallStub(&string_add_stub);
2956
2957 __ bind(&call_runtime);
2958 GenerateTypeTransition(masm);
lrn@chromium.org7516f052011-03-30 08:52:27 +00002959}
2960
2961
danno@chromium.org40cb8782011-05-25 07:58:50 +00002962void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00002963 ASSERT(Max(left_type_, right_type_) == BinaryOpIC::INT32);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002964
2965 Register left = a1;
2966 Register right = a0;
2967 Register scratch1 = t3;
2968 Register scratch2 = t5;
2969 FPURegister double_scratch = f0;
2970 FPURegister single_scratch = f6;
2971
2972 Register heap_number_result = no_reg;
2973 Register heap_number_map = t2;
2974 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
2975
2976 Label call_runtime;
2977 // Labels for type transition, used for wrong input or output types.
 2978  // Both labels are currently bound to the same position. We use two
 2979  // different labels to differentiate the causes leading to a type transition.
2980 Label transition;
2981
2982 // Smi-smi fast case.
2983 Label skip;
2984 __ Or(scratch1, left, right);
2985 __ JumpIfNotSmi(scratch1, &skip);
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00002986 BinaryOpStub_GenerateSmiSmiOperation(masm, op_);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00002987 // Fall through if the result is not a smi.
2988 __ bind(&skip);
2989
2990 switch (op_) {
2991 case Token::ADD:
2992 case Token::SUB:
2993 case Token::MUL:
2994 case Token::DIV:
2995 case Token::MOD: {
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00002996 // It could be that only SMIs have been seen at either the left
2997 // or the right operand. For precise type feedback, patch the IC
2998 // again if this changes.
2999 if (left_type_ == BinaryOpIC::SMI) {
3000 __ JumpIfNotSmi(left, &transition);
3001 }
3002 if (right_type_ == BinaryOpIC::SMI) {
3003 __ JumpIfNotSmi(right, &transition);
3004 }
whesse@chromium.org7b260152011-06-20 15:33:18 +00003005      // Load both operands and check that they are 32-bit integers.
3006 // Jump to type transition if they are not. The registers a0 and a1 (right
3007 // and left) are preserved for the runtime call.
3008 FloatingPointHelper::Destination destination =
3009 (CpuFeatures::IsSupported(FPU) && op_ != Token::MOD)
3010 ? FloatingPointHelper::kFPURegisters
3011 : FloatingPointHelper::kCoreRegisters;
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003012
whesse@chromium.org7b260152011-06-20 15:33:18 +00003013 FloatingPointHelper::LoadNumberAsInt32Double(masm,
3014 right,
3015 destination,
3016 f14,
svenpanne@chromium.org83130cf2012-11-30 10:13:25 +00003017 f16,
whesse@chromium.org7b260152011-06-20 15:33:18 +00003018 a2,
3019 a3,
3020 heap_number_map,
3021 scratch1,
3022 scratch2,
3023 f2,
3024 &transition);
3025 FloatingPointHelper::LoadNumberAsInt32Double(masm,
3026 left,
3027 destination,
3028 f12,
svenpanne@chromium.org83130cf2012-11-30 10:13:25 +00003029 f16,
whesse@chromium.org7b260152011-06-20 15:33:18 +00003030 t0,
3031 t1,
3032 heap_number_map,
3033 scratch1,
3034 scratch2,
3035 f2,
3036 &transition);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003037
3038 if (destination == FloatingPointHelper::kFPURegisters) {
3039 CpuFeatures::Scope scope(FPU);
3040 Label return_heap_number;
3041 switch (op_) {
3042 case Token::ADD:
3043 __ add_d(f10, f12, f14);
3044 break;
3045 case Token::SUB:
3046 __ sub_d(f10, f12, f14);
3047 break;
3048 case Token::MUL:
3049 __ mul_d(f10, f12, f14);
3050 break;
3051 case Token::DIV:
3052 __ div_d(f10, f12, f14);
3053 break;
3054 default:
3055 UNREACHABLE();
3056 }
3057
3058 if (op_ != Token::DIV) {
3059 // These operations produce an integer result.
3060 // Try to return a smi if we can.
3061 // Otherwise return a heap number if allowed, or jump to type
3062 // transition.
3063
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00003064 Register except_flag = scratch2;
3065 __ EmitFPUTruncate(kRoundToZero,
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00003066 scratch1,
svenpanne@chromium.org83130cf2012-11-30 10:13:25 +00003067 f10,
3068 at,
3069 f16,
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00003070 except_flag);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003071
danno@chromium.org40cb8782011-05-25 07:58:50 +00003072 if (result_type_ <= BinaryOpIC::INT32) {
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00003073 // If except_flag != 0, result does not fit in a 32-bit integer.
3074 __ Branch(&transition, ne, except_flag, Operand(zero_reg));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003075 }
3076
3077 // Check if the result fits in a smi.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003078 __ Addu(scratch2, scratch1, Operand(0x40000000));
3079 // If not try to return a heap number.
3080 __ Branch(&return_heap_number, lt, scratch2, Operand(zero_reg));
3081 // Check for minus zero. Return heap number for minus zero.
3082 Label not_zero;
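            // f11 is the high word of the double result in f10, so its sign
            // bit is what distinguishes -0.0 from +0.0 here.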
3083 __ Branch(&not_zero, ne, scratch1, Operand(zero_reg));
3084 __ mfc1(scratch2, f11);
3085 __ And(scratch2, scratch2, HeapNumber::kSignMask);
3086 __ Branch(&return_heap_number, ne, scratch2, Operand(zero_reg));
3087 __ bind(&not_zero);
3088
3089 // Tag the result and return.
3090 __ SmiTag(v0, scratch1);
3091 __ Ret();
3092 } else {
3093 // DIV just falls through to allocating a heap number.
3094 }
3095
lrn@chromium.orgac2828d2011-06-23 06:29:21 +00003096 __ bind(&return_heap_number);
3097 // Return a heap number, or fall through to type transition or runtime
3098 // call if we can't.
whesse@chromium.org7b260152011-06-20 15:33:18 +00003099 if (result_type_ >= ((op_ == Token::DIV) ? BinaryOpIC::HEAP_NUMBER
3100 : BinaryOpIC::INT32)) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003101 // We are using FPU registers so s0 is available.
3102 heap_number_result = s0;
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00003103 BinaryOpStub_GenerateHeapResultAllocation(masm,
3104 heap_number_result,
3105 heap_number_map,
3106 scratch1,
3107 scratch2,
3108 &call_runtime,
3109 mode_);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003110 __ mov(v0, heap_number_result);
3111 __ sdc1(f10, FieldMemOperand(v0, HeapNumber::kValueOffset));
3112 __ Ret();
3113 }
3114
3115 // A DIV operation expecting an integer result falls through
3116 // to type transition.
3117
3118 } else {
3119 // We preserved a0 and a1 to be able to call runtime.
3120 // Save the left value on the stack.
3121 __ Push(t1, t0);
3122
3123 Label pop_and_call_runtime;
3124
3125 // Allocate a heap number to store the result.
3126 heap_number_result = s0;
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00003127 BinaryOpStub_GenerateHeapResultAllocation(masm,
3128 heap_number_result,
3129 heap_number_map,
3130 scratch1,
3131 scratch2,
3132 &pop_and_call_runtime,
3133 mode_);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003134
3135 // Load the left value from the value saved on the stack.
3136 __ Pop(a1, a0);
3137
3138 // Call the C function to handle the double operation.
3139 FloatingPointHelper::CallCCodeForDoubleOperation(
3140 masm, op_, heap_number_result, scratch1);
3141 if (FLAG_debug_code) {
3142 __ stop("Unreachable code.");
3143 }
3144
3145 __ bind(&pop_and_call_runtime);
3146 __ Drop(2);
3147 __ Branch(&call_runtime);
3148 }
3149
3150 break;
3151 }
3152
3153 case Token::BIT_OR:
3154 case Token::BIT_XOR:
3155 case Token::BIT_AND:
3156 case Token::SAR:
3157 case Token::SHR:
3158 case Token::SHL: {
3159 Label return_heap_number;
3160 Register scratch3 = t1;
3161 // Convert operands to 32-bit integers. Right in a2 and left in a3. The
3162 // registers a0 and a1 (right and left) are preserved for the runtime
3163 // call.
3164 FloatingPointHelper::LoadNumberAsInt32(masm,
3165 left,
3166 a3,
3167 heap_number_map,
3168 scratch1,
3169 scratch2,
3170 scratch3,
3171 f0,
svenpanne@chromium.org83130cf2012-11-30 10:13:25 +00003172 f2,
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003173 &transition);
3174 FloatingPointHelper::LoadNumberAsInt32(masm,
3175 right,
3176 a2,
3177 heap_number_map,
3178 scratch1,
3179 scratch2,
3180 scratch3,
3181 f0,
svenpanne@chromium.org83130cf2012-11-30 10:13:25 +00003182 f2,
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003183 &transition);
3184
3185 // The ECMA-262 standard specifies that, for shift operations, only the
3186 // 5 least significant bits of the shift value should be used.
3187 switch (op_) {
3188 case Token::BIT_OR:
3189 __ Or(a2, a3, Operand(a2));
3190 break;
3191 case Token::BIT_XOR:
3192 __ Xor(a2, a3, Operand(a2));
3193 break;
3194 case Token::BIT_AND:
3195 __ And(a2, a3, Operand(a2));
3196 break;
3197 case Token::SAR:
3198 __ And(a2, a2, Operand(0x1f));
3199 __ srav(a2, a3, a2);
3200 break;
3201 case Token::SHR:
3202 __ And(a2, a2, Operand(0x1f));
3203 __ srlv(a2, a3, a2);
3204 // SHR is special because it is required to produce a positive answer.
3205 // We only get a negative result if the shift value (a2) is 0.
 3206          // This result cannot be represented as a signed 32-bit integer, try
3207 // to return a heap number if we can.
3208 // The non FPU code does not support this special case, so jump to
3209 // runtime if we don't support it.
3210 if (CpuFeatures::IsSupported(FPU)) {
danno@chromium.org40cb8782011-05-25 07:58:50 +00003211 __ Branch((result_type_ <= BinaryOpIC::INT32)
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003212 ? &transition
3213 : &return_heap_number,
3214 lt,
3215 a2,
3216 Operand(zero_reg));
3217 } else {
danno@chromium.org40cb8782011-05-25 07:58:50 +00003218 __ Branch((result_type_ <= BinaryOpIC::INT32)
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003219 ? &transition
3220 : &call_runtime,
3221 lt,
3222 a2,
3223 Operand(zero_reg));
3224 }
3225 break;
3226 case Token::SHL:
3227 __ And(a2, a2, Operand(0x1f));
3228 __ sllv(a2, a3, a2);
3229 break;
3230 default:
3231 UNREACHABLE();
3232 }
3233
3234 // Check if the result fits in a smi.
3235 __ Addu(scratch1, a2, Operand(0x40000000));
3236 // If not try to return a heap number. (We know the result is an int32.)
3237 __ Branch(&return_heap_number, lt, scratch1, Operand(zero_reg));
3238 // Tag the result and return.
3239 __ SmiTag(v0, a2);
3240 __ Ret();
3241
3242 __ bind(&return_heap_number);
3243 heap_number_result = t1;
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00003244 BinaryOpStub_GenerateHeapResultAllocation(masm,
3245 heap_number_result,
3246 heap_number_map,
3247 scratch1,
3248 scratch2,
3249 &call_runtime,
3250 mode_);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003251
3252 if (CpuFeatures::IsSupported(FPU)) {
3253 CpuFeatures::Scope scope(FPU);
3254
3255 if (op_ != Token::SHR) {
3256 // Convert the result to a floating point value.
3257 __ mtc1(a2, double_scratch);
3258 __ cvt_d_w(double_scratch, double_scratch);
3259 } else {
3260 // The result must be interpreted as an unsigned 32-bit integer.
3261 __ mtc1(a2, double_scratch);
ricow@chromium.org4668a2c2011-08-29 10:41:00 +00003262 __ Cvt_d_uw(double_scratch, double_scratch, single_scratch);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003263 }
3264
3265 // Store the result.
3266 __ mov(v0, heap_number_result);
3267 __ sdc1(double_scratch, FieldMemOperand(v0, HeapNumber::kValueOffset));
3268 __ Ret();
3269 } else {
3270 // Tail call that writes the int32 in a2 to the heap number in v0, using
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00003271 // a3 and a0 as scratch. v0 is preserved and returned.
rossberg@chromium.org400388e2012-06-06 09:29:22 +00003272 __ mov(v0, t1);
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00003273 WriteInt32ToHeapNumberStub stub(a2, v0, a3, a0);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003274 __ TailCallStub(&stub);
3275 }
3276
3277 break;
3278 }
3279
3280 default:
3281 UNREACHABLE();
3282 }
3283
lrn@chromium.orgac2828d2011-06-23 06:29:21 +00003284 // We never expect DIV to yield an integer result, so we always generate
3285 // type transition code for DIV operations expecting an integer result: the
3286 // code will fall through to this type transition.
3287 if (transition.is_linked() ||
3288 ((op_ == Token::DIV) && (result_type_ <= BinaryOpIC::INT32))) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003289 __ bind(&transition);
3290 GenerateTypeTransition(masm);
3291 }
3292
3293 __ bind(&call_runtime);
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00003294 GenerateRegisterArgsPush(masm);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003295 GenerateCallRuntime(masm);
3296}
3297
3298
danno@chromium.org40cb8782011-05-25 07:58:50 +00003299void BinaryOpStub::GenerateOddballStub(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003300 Label call_runtime;
3301
3302 if (op_ == Token::ADD) {
3303 // Handle string addition here, because it is the only operation
3304 // that does not do a ToNumber conversion on the operands.
3305 GenerateAddStrings(masm);
3306 }
3307
3308 // Convert oddball arguments to numbers.
3309 Label check, done;
3310 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
3311 __ Branch(&check, ne, a1, Operand(t0));
3312 if (Token::IsBitOp(op_)) {
3313 __ li(a1, Operand(Smi::FromInt(0)));
3314 } else {
3315 __ LoadRoot(a1, Heap::kNanValueRootIndex);
3316 }
3317 __ jmp(&done);
3318 __ bind(&check);
3319 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
3320 __ Branch(&done, ne, a0, Operand(t0));
3321 if (Token::IsBitOp(op_)) {
3322 __ li(a0, Operand(Smi::FromInt(0)));
3323 } else {
3324 __ LoadRoot(a0, Heap::kNanValueRootIndex);
3325 }
3326 __ bind(&done);
3327
3328 GenerateHeapNumberStub(masm);
lrn@chromium.org7516f052011-03-30 08:52:27 +00003329}
3330
3331
danno@chromium.org40cb8782011-05-25 07:58:50 +00003332void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00003333 Label call_runtime, transition;
3334 BinaryOpStub_GenerateFPOperation(
3335 masm, left_type_, right_type_, false,
3336 &transition, &call_runtime, &transition, op_, mode_);
3337
3338 __ bind(&transition);
3339 GenerateTypeTransition(masm);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003340
3341 __ bind(&call_runtime);
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00003342 GenerateRegisterArgsPush(masm);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003343 GenerateCallRuntime(masm);
lrn@chromium.org7516f052011-03-30 08:52:27 +00003344}
3345
3346
danno@chromium.org40cb8782011-05-25 07:58:50 +00003347void BinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00003348 Label call_runtime, call_string_add_or_runtime, transition;
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003349
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00003350 BinaryOpStub_GenerateSmiCode(
3351 masm, &call_runtime, &call_runtime, op_, ALLOW_HEAPNUMBER_RESULTS, mode_);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003352
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00003353 BinaryOpStub_GenerateFPOperation(
3354 masm, left_type_, right_type_, false,
3355 &call_string_add_or_runtime, &call_runtime, &transition, op_, mode_);
3356
3357 __ bind(&transition);
3358 GenerateTypeTransition(masm);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003359
3360 __ bind(&call_string_add_or_runtime);
3361 if (op_ == Token::ADD) {
3362 GenerateAddStrings(masm);
3363 }
3364
3365 __ bind(&call_runtime);
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00003366 GenerateRegisterArgsPush(masm);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003367 GenerateCallRuntime(masm);
lrn@chromium.org7516f052011-03-30 08:52:27 +00003368}
3369
3370
danno@chromium.org40cb8782011-05-25 07:58:50 +00003371void BinaryOpStub::GenerateAddStrings(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003372 ASSERT(op_ == Token::ADD);
3373 Label left_not_string, call_runtime;
3374
3375 Register left = a1;
3376 Register right = a0;
3377
3378 // Check if left argument is a string.
3379 __ JumpIfSmi(left, &left_not_string);
3380 __ GetObjectType(left, a2, a2);
3381 __ Branch(&left_not_string, ge, a2, Operand(FIRST_NONSTRING_TYPE));
3382
3383 StringAddStub string_add_left_stub(NO_STRING_CHECK_LEFT_IN_STUB);
3384 GenerateRegisterArgsPush(masm);
3385 __ TailCallStub(&string_add_left_stub);
3386
3387 // Left operand is not a string, test right.
3388 __ bind(&left_not_string);
3389 __ JumpIfSmi(right, &call_runtime);
3390 __ GetObjectType(right, a2, a2);
3391 __ Branch(&call_runtime, ge, a2, Operand(FIRST_NONSTRING_TYPE));
3392
3393 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB);
3394 GenerateRegisterArgsPush(masm);
3395 __ TailCallStub(&string_add_right_stub);
3396
3397 // At least one argument is not a string.
3398 __ bind(&call_runtime);
lrn@chromium.org7516f052011-03-30 08:52:27 +00003399}
3400
3401
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00003402void BinaryOpStub_GenerateHeapResultAllocation(MacroAssembler* masm,
3403 Register result,
3404 Register heap_number_map,
3405 Register scratch1,
3406 Register scratch2,
3407 Label* gc_required,
3408 OverwriteMode mode) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003409 // Code below will scratch result if allocation fails. To keep both arguments
3410 // intact for the runtime call result cannot be one of these.
3411 ASSERT(!result.is(a0) && !result.is(a1));
3412
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00003413 if (mode == OVERWRITE_LEFT || mode == OVERWRITE_RIGHT) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003414 Label skip_allocation, allocated;
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00003415 Register overwritable_operand = mode == OVERWRITE_LEFT ? a1 : a0;
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003416 // If the overwritable operand is already an object, we skip the
3417 // allocation of a heap number.
3418 __ JumpIfNotSmi(overwritable_operand, &skip_allocation);
3419 // Allocate a heap number for the result.
3420 __ AllocateHeapNumber(
3421 result, scratch1, scratch2, heap_number_map, gc_required);
3422 __ Branch(&allocated);
3423 __ bind(&skip_allocation);
3424 // Use object holding the overwritable operand for result.
3425 __ mov(result, overwritable_operand);
3426 __ bind(&allocated);
3427 } else {
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00003428 ASSERT(mode == NO_OVERWRITE);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003429 __ AllocateHeapNumber(
3430 result, scratch1, scratch2, heap_number_map, gc_required);
3431 }
lrn@chromium.org7516f052011-03-30 08:52:27 +00003432}
3433
3434
danno@chromium.org40cb8782011-05-25 07:58:50 +00003435void BinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003436 __ Push(a1, a0);
lrn@chromium.org7516f052011-03-30 08:52:27 +00003437}
3438
3439
3441void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003442 // Untagged case: double input in f4, double result goes
3443 // into f4.
3444 // Tagged case: tagged input on top of stack and in a0,
3445 // tagged result (heap number) goes into v0.
3446
3447 Label input_not_smi;
3448 Label loaded;
3449 Label calculate;
3450 Label invalid_cache;
3451 const Register scratch0 = t5;
3452 const Register scratch1 = t3;
3453 const Register cache_entry = a0;
3454 const bool tagged = (argument_type_ == TAGGED);
3455
3456 if (CpuFeatures::IsSupported(FPU)) {
3457 CpuFeatures::Scope scope(FPU);
3458
3459 if (tagged) {
3460 // Argument is a number and is on stack and in a0.
3461 // Load argument and check if it is a smi.
3462 __ JumpIfNotSmi(a0, &input_not_smi);
3463
3464 // Input is a smi. Convert to double and load the low and high words
3465 // of the double into a2, a3.
3466 __ sra(t0, a0, kSmiTagSize);
3467 __ mtc1(t0, f4);
3468 __ cvt_d_w(f4, f4);
danno@chromium.org40cb8782011-05-25 07:58:50 +00003469 __ Move(a2, a3, f4);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003470 __ Branch(&loaded);
3471
3472 __ bind(&input_not_smi);
3473 // Check if input is a HeapNumber.
3474 __ CheckMap(a0,
3475 a1,
3476 Heap::kHeapNumberMapRootIndex,
3477 &calculate,
danno@chromium.org40cb8782011-05-25 07:58:50 +00003478 DONT_DO_SMI_CHECK);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003479 // Input is a HeapNumber. Store the
3480 // low and high words into a2, a3.
3481 __ lw(a2, FieldMemOperand(a0, HeapNumber::kValueOffset));
3482 __ lw(a3, FieldMemOperand(a0, HeapNumber::kValueOffset + 4));
3483 } else {
3484 // Input is untagged double in f4. Output goes to f4.
danno@chromium.org40cb8782011-05-25 07:58:50 +00003485 __ Move(a2, a3, f4);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003486 }
3487 __ bind(&loaded);
3488 // a2 = low 32 bits of double value.
3489 // a3 = high 32 bits of double value.
3490 // Compute hash (the shifts are arithmetic):
3491 // h = (low ^ high); h ^= h >> 16; h ^= h >> 8; h = h & (cacheSize - 1);
3492 __ Xor(a1, a2, a3);
3493 __ sra(t0, a1, 16);
3494 __ Xor(a1, a1, t0);
3495 __ sra(t0, a1, 8);
3496 __ Xor(a1, a1, t0);
3497 ASSERT(IsPowerOf2(TranscendentalCache::SubCache::kCacheSize));
3498 __ And(a1, a1, Operand(TranscendentalCache::SubCache::kCacheSize - 1));
3499
3500 // a2 = low 32 bits of double value.
3501 // a3 = high 32 bits of double value.
3502 // a1 = TranscendentalCache::hash(double value).
3503 __ li(cache_entry, Operand(
3504 ExternalReference::transcendental_cache_array_address(
3505 masm->isolate())));
3506 // a0 points to cache array.
3507 __ lw(cache_entry, MemOperand(cache_entry, type_ * sizeof(
3508 Isolate::Current()->transcendental_cache()->caches_[0])));
3509 // a0 points to the cache for the type type_.
3510 // If NULL, the cache hasn't been initialized yet, so go through runtime.
3511 __ Branch(&invalid_cache, eq, cache_entry, Operand(zero_reg));
3512
3513#ifdef DEBUG
3514 // Check that the layout of cache elements match expectations.
3515 { TranscendentalCache::SubCache::Element test_elem[2];
3516 char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
3517 char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
3518 char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
3519 char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
3520 char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
3521 CHECK_EQ(12, elem2_start - elem_start); // Two uint_32's and a pointer.
3522 CHECK_EQ(0, elem_in0 - elem_start);
3523 CHECK_EQ(kIntSize, elem_in1 - elem_start);
3524 CHECK_EQ(2 * kIntSize, elem_out - elem_start);
3525 }
3526#endif
3527
 3528    // Find the address of the a1-th entry in the cache, i.e., &a0[a1*12].
3529 __ sll(t0, a1, 1);
3530 __ Addu(a1, a1, t0);
3531 __ sll(t0, a1, 2);
3532 __ Addu(cache_entry, cache_entry, t0);
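    // The shifts and adds above compute a1 * 12 as ((a1 + a1 * 2) * 4): each
    // cache element is 12 bytes, two uint32_t input words plus the result
    // pointer (see the layout check above).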
3533
3534 // Check if cache matches: Double value is stored in uint32_t[2] array.
3535 __ lw(t0, MemOperand(cache_entry, 0));
3536 __ lw(t1, MemOperand(cache_entry, 4));
3537 __ lw(t2, MemOperand(cache_entry, 8));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003538 __ Branch(&calculate, ne, a2, Operand(t0));
3539 __ Branch(&calculate, ne, a3, Operand(t1));
3540 // Cache hit. Load result, cleanup and return.
svenpanne@chromium.orgecb9dd62011-12-01 08:22:35 +00003541 Counters* counters = masm->isolate()->counters();
3542 __ IncrementCounter(
3543 counters->transcendental_cache_hit(), 1, scratch0, scratch1);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003544 if (tagged) {
3545 // Pop input value from stack and load result into v0.
3546 __ Drop(1);
3547 __ mov(v0, t2);
3548 } else {
3549 // Load result into f4.
3550 __ ldc1(f4, FieldMemOperand(t2, HeapNumber::kValueOffset));
3551 }
3552 __ Ret();
3553 } // if (CpuFeatures::IsSupported(FPU))
3554
3555 __ bind(&calculate);
svenpanne@chromium.orgecb9dd62011-12-01 08:22:35 +00003556 Counters* counters = masm->isolate()->counters();
3557 __ IncrementCounter(
3558 counters->transcendental_cache_miss(), 1, scratch0, scratch1);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003559 if (tagged) {
3560 __ bind(&invalid_cache);
3561 __ TailCallExternalReference(ExternalReference(RuntimeFunction(),
3562 masm->isolate()),
3563 1,
3564 1);
3565 } else {
yangguo@chromium.org304cc332012-07-24 07:59:48 +00003566 ASSERT(CpuFeatures::IsSupported(FPU));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003567 CpuFeatures::Scope scope(FPU);
3568
3569 Label no_update;
3570 Label skip_cache;
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003571
3572 // Call C function to calculate the result and update the cache.
yangguo@chromium.org304cc332012-07-24 07:59:48 +00003573 // a0: precalculated cache entry address.
3574 // a2 and a3: parts of the double value.
3575 // Store a0, a2 and a3 on stack for later before calling C function.
3576 __ Push(a3, a2, cache_entry);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003577 GenerateCallCFunction(masm, scratch0);
3578 __ GetCFunctionDoubleResult(f4);
3579
3580 // Try to update the cache. If we cannot allocate a
3581 // heap number, we return the result without updating.
yangguo@chromium.org304cc332012-07-24 07:59:48 +00003582 __ Pop(a3, a2, cache_entry);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003583 __ LoadRoot(t1, Heap::kHeapNumberMapRootIndex);
3584 __ AllocateHeapNumber(t2, scratch0, scratch1, t1, &no_update);
3585 __ sdc1(f4, FieldMemOperand(t2, HeapNumber::kValueOffset));
3586
3587 __ sw(a2, MemOperand(cache_entry, 0 * kPointerSize));
3588 __ sw(a3, MemOperand(cache_entry, 1 * kPointerSize));
3589 __ sw(t2, MemOperand(cache_entry, 2 * kPointerSize));
3590
ulan@chromium.org6ff65142012-03-21 09:52:17 +00003591 __ Ret(USE_DELAY_SLOT);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003592 __ mov(v0, cache_entry);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003593
3594 __ bind(&invalid_cache);
3595 // The cache is invalid. Call runtime which will recreate the
3596 // cache.
3597 __ LoadRoot(t1, Heap::kHeapNumberMapRootIndex);
3598 __ AllocateHeapNumber(a0, scratch0, scratch1, t1, &skip_cache);
3599 __ sdc1(f4, FieldMemOperand(a0, HeapNumber::kValueOffset));
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00003600 {
3601 FrameScope scope(masm, StackFrame::INTERNAL);
3602 __ push(a0);
3603 __ CallRuntime(RuntimeFunction(), 1);
3604 }
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003605 __ ldc1(f4, FieldMemOperand(v0, HeapNumber::kValueOffset));
3606 __ Ret();
3607
3608 __ bind(&skip_cache);
3609 // Call C function to calculate the result and answer directly
3610 // without updating the cache.
3611 GenerateCallCFunction(masm, scratch0);
3612 __ GetCFunctionDoubleResult(f4);
3613 __ bind(&no_update);
3614
3615 // We return the value in f4 without adding it to the cache, but
3616 // we cause a scavenging GC so that future allocations will succeed.
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00003617 {
3618 FrameScope scope(masm, StackFrame::INTERNAL);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003619
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00003620 // Allocate an aligned object larger than a HeapNumber.
3621 ASSERT(4 * kPointerSize >= HeapNumber::kSize);
3622 __ li(scratch0, Operand(4 * kPointerSize));
3623 __ push(scratch0);
3624 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
3625 }
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003626 __ Ret();
3627 }
3628}
3629
3630
3631void TranscendentalCacheStub::GenerateCallCFunction(MacroAssembler* masm,
3632 Register scratch) {
3633 __ push(ra);
3634 __ PrepareCallCFunction(2, scratch);
danno@chromium.org40cb8782011-05-25 07:58:50 +00003635 if (IsMipsSoftFloatABI) {
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00003636 __ Move(a0, a1, f4);
danno@chromium.org40cb8782011-05-25 07:58:50 +00003637 } else {
3638 __ mov_d(f12, f4);
3639 }
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00003640 AllowExternalCallThatCantCauseGC scope(masm);
svenpanne@chromium.orgecb9dd62011-12-01 08:22:35 +00003641 Isolate* isolate = masm->isolate();
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003642 switch (type_) {
3643 case TranscendentalCache::SIN:
3644 __ CallCFunction(
svenpanne@chromium.orgecb9dd62011-12-01 08:22:35 +00003645 ExternalReference::math_sin_double_function(isolate),
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00003646 0, 1);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003647 break;
3648 case TranscendentalCache::COS:
3649 __ CallCFunction(
svenpanne@chromium.orgecb9dd62011-12-01 08:22:35 +00003650 ExternalReference::math_cos_double_function(isolate),
3651 0, 1);
3652 break;
3653 case TranscendentalCache::TAN:
3654 __ CallCFunction(ExternalReference::math_tan_double_function(isolate),
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00003655 0, 1);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003656 break;
3657 case TranscendentalCache::LOG:
3658 __ CallCFunction(
svenpanne@chromium.orgecb9dd62011-12-01 08:22:35 +00003659 ExternalReference::math_log_double_function(isolate),
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00003660 0, 1);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003661 break;
3662 default:
3663 UNIMPLEMENTED();
3664 break;
3665 }
3666 __ pop(ra);
lrn@chromium.org7516f052011-03-30 08:52:27 +00003667}
3668
3669
3670Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003671 switch (type_) {
3672 // Add more cases when necessary.
3673 case TranscendentalCache::SIN: return Runtime::kMath_sin;
3674 case TranscendentalCache::COS: return Runtime::kMath_cos;
svenpanne@chromium.orgecb9dd62011-12-01 08:22:35 +00003675 case TranscendentalCache::TAN: return Runtime::kMath_tan;
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003676 case TranscendentalCache::LOG: return Runtime::kMath_log;
3677 default:
3678 UNIMPLEMENTED();
3679 return Runtime::kAbort;
3680 }
lrn@chromium.org7516f052011-03-30 08:52:27 +00003681}
3682
3683
3684void StackCheckStub::Generate(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003685 __ TailCallRuntime(Runtime::kStackGuard, 0, 1);
lrn@chromium.org7516f052011-03-30 08:52:27 +00003686}
3687
3688
yangguo@chromium.org56454712012-02-16 15:33:53 +00003689void InterruptStub::Generate(MacroAssembler* masm) {
3690 __ TailCallRuntime(Runtime::kInterrupt, 0, 1);
3691}
3692
3693
karlklose@chromium.org83a47282011-05-11 11:54:09 +00003694void MathPowStub::Generate(MacroAssembler* masm) {
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00003695 CpuFeatures::Scope fpu_scope(FPU);
3696 const Register base = a1;
3697 const Register exponent = a2;
3698 const Register heapnumbermap = t1;
3699 const Register heapnumber = v0;
3700 const DoubleRegister double_base = f2;
3701 const DoubleRegister double_exponent = f4;
3702 const DoubleRegister double_result = f0;
3703 const DoubleRegister double_scratch = f6;
3704 const FPURegister single_scratch = f8;
3705 const Register scratch = t5;
3706 const Register scratch2 = t3;
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003707
danno@chromium.orgfa458e42012-02-01 10:48:36 +00003708 Label call_runtime, done, int_exponent;
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00003709 if (exponent_type_ == ON_STACK) {
3710 Label base_is_smi, unpack_exponent;
3711 // The exponent and base are supplied as arguments on the stack.
3712 // This can only happen if the stub is called from non-optimized code.
3713 // Load input parameters from stack to double registers.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003714 __ lw(base, MemOperand(sp, 1 * kPointerSize));
3715 __ lw(exponent, MemOperand(sp, 0 * kPointerSize));
3716
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00003717 __ LoadRoot(heapnumbermap, Heap::kHeapNumberMapRootIndex);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003718
danno@chromium.orgfa458e42012-02-01 10:48:36 +00003719 __ UntagAndJumpIfSmi(scratch, base, &base_is_smi);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003720 __ lw(scratch, FieldMemOperand(base, JSObject::kMapOffset));
3721 __ Branch(&call_runtime, ne, scratch, Operand(heapnumbermap));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003722
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00003723 __ ldc1(double_base, FieldMemOperand(base, HeapNumber::kValueOffset));
3724 __ jmp(&unpack_exponent);
3725
3726 __ bind(&base_is_smi);
danno@chromium.orgfa458e42012-02-01 10:48:36 +00003727 __ mtc1(scratch, single_scratch);
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00003728 __ cvt_d_w(double_base, single_scratch);
3729 __ bind(&unpack_exponent);
3730
danno@chromium.orgfa458e42012-02-01 10:48:36 +00003731 __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003732
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003733 __ lw(scratch, FieldMemOperand(exponent, JSObject::kMapOffset));
3734 __ Branch(&call_runtime, ne, scratch, Operand(heapnumbermap));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003735 __ ldc1(double_exponent,
3736 FieldMemOperand(exponent, HeapNumber::kValueOffset));
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00003737 } else if (exponent_type_ == TAGGED) {
3738 // Base is already in double_base.
danno@chromium.orgfa458e42012-02-01 10:48:36 +00003739 __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003740
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00003741 __ ldc1(double_exponent,
3742 FieldMemOperand(exponent, HeapNumber::kValueOffset));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003743 }
3744
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00003745 if (exponent_type_ != INTEGER) {
3746 Label int_exponent_convert;
3747 // Detect integer exponents stored as double.
3748 __ EmitFPUTruncate(kRoundToMinusInf,
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00003749 scratch,
svenpanne@chromium.org83130cf2012-11-30 10:13:25 +00003750 double_exponent,
3751 at,
3752 double_scratch,
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00003753 scratch2,
3754 kCheckForInexactConversion);
3755 // scratch2 == 0 means there was no conversion error.
3756 __ Branch(&int_exponent_convert, eq, scratch2, Operand(zero_reg));
3757
3758 if (exponent_type_ == ON_STACK) {
3759 // Detect square root case. Crankshaft detects constant +/-0.5 at
3760 // compile time and uses DoMathPowHalf instead. We then skip this check
3761 // for non-constant cases of +/-0.5 as these hardly occur.
3762 Label not_plus_half;
3763
3764 // Test for 0.5.
3765 __ Move(double_scratch, 0.5);
3766 __ BranchF(USE_DELAY_SLOT,
3767 &not_plus_half,
3768 NULL,
3769 ne,
3770 double_exponent,
3771 double_scratch);
ulan@chromium.org6ff65142012-03-21 09:52:17 +00003772 // double_scratch can be overwritten in the delay slot.
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00003773 // Calculates square root of base. Check for the special case of
3774 // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
3775 __ Move(double_scratch, -V8_INFINITY);
3776 __ BranchF(USE_DELAY_SLOT, &done, NULL, eq, double_base, double_scratch);
3777 __ neg_d(double_result, double_scratch);
3778
3779 // Add +0 to convert -0 to +0.
3780 __ add_d(double_scratch, double_base, kDoubleRegZero);
3781 __ sqrt_d(double_result, double_scratch);
3782 __ jmp(&done);
3783
3784 __ bind(&not_plus_half);
3785 __ Move(double_scratch, -0.5);
3786 __ BranchF(USE_DELAY_SLOT,
3787 &call_runtime,
3788 NULL,
3789 ne,
3790 double_exponent,
3791 double_scratch);
ulan@chromium.org6ff65142012-03-21 09:52:17 +00003792 // double_scratch can be overwritten in the delay slot.
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00003793 // Calculates square root of base. Check for the special case of
3794 // Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
3795 __ Move(double_scratch, -V8_INFINITY);
3796 __ BranchF(USE_DELAY_SLOT, &done, NULL, eq, double_base, double_scratch);
3797 __ Move(double_result, kDoubleRegZero);
3798
3799 // Add +0 to convert -0 to +0.
3800 __ add_d(double_scratch, double_base, kDoubleRegZero);
3801 __ Move(double_result, 1);
3802 __ sqrt_d(double_scratch, double_scratch);
3803 __ div_d(double_result, double_result, double_scratch);
3804 __ jmp(&done);
3805 }
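    // Recap of the two fast paths above (a description, not new behavior):
    // x^0.5 is computed as sqrt(x + 0.0) and x^-0.5 as 1/sqrt(x + 0.0), with
    // base == -Infinity special-cased first. Adding +0 maps a -0 base to +0,
    // so e.g. Math.pow(-0, 0.5) yields +0 and Math.pow(-0, -0.5) yields
    // +Infinity, matching ES5 15.8.2.13.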
3806
3807 __ push(ra);
3808 {
3809 AllowExternalCallThatCantCauseGC scope(masm);
svenpanne@chromium.org83130cf2012-11-30 10:13:25 +00003810 __ PrepareCallCFunction(0, 2, scratch2);
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00003811 __ SetCallCDoubleArguments(double_base, double_exponent);
3812 __ CallCFunction(
3813 ExternalReference::power_double_double_function(masm->isolate()),
3814 0, 2);
3815 }
3816 __ pop(ra);
3817 __ GetCFunctionDoubleResult(double_result);
3818 __ jmp(&done);
3819
3820 __ bind(&int_exponent_convert);
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00003821 }
3822
3823 // Calculate power with integer exponent.
3824 __ bind(&int_exponent);
3825
danno@chromium.orgfa458e42012-02-01 10:48:36 +00003826 // Get two copies of exponent in the registers scratch and exponent.
3827 if (exponent_type_ == INTEGER) {
3828 __ mov(scratch, exponent);
3829 } else {
3830 // Exponent has previously been stored into scratch as untagged integer.
3831 __ mov(exponent, scratch);
3832 }
3833
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00003834 __ mov_d(double_scratch, double_base); // Back up base.
3835 __ Move(double_result, 1.0);
3836
3837 // Get absolute value of exponent.
3838 Label positive_exponent;
3839 __ Branch(&positive_exponent, ge, scratch, Operand(zero_reg));
3840 __ Subu(scratch, zero_reg, scratch);
3841 __ bind(&positive_exponent);
3842
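  // The loop below is standard exponentiation by squaring on the absolute
  // integer exponent. A rough C sketch of what it computes (illustrative
  // only, not part of the original code):
  //   double acc = 1.0, b = base;
  //   for (int e = abs_exponent; e != 0; e >>= 1) {
  //     if (e & 1) acc *= b;
  //     if (e >> 1) b *= b;
  //   }
  // acc then holds base^|exponent|; a negative exponent is handled after the
  // loop as 1.0 / acc, bailing out to the slow path if that quotient is zero
  // (possible subnormal underflow).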
3843 Label while_true, no_carry, loop_end;
3844 __ bind(&while_true);
3845
3846 __ And(scratch2, scratch, 1);
3847
3848 __ Branch(&no_carry, eq, scratch2, Operand(zero_reg));
3849 __ mul_d(double_result, double_result, double_scratch);
3850 __ bind(&no_carry);
3851
3852 __ sra(scratch, scratch, 1);
3853
3854 __ Branch(&loop_end, eq, scratch, Operand(zero_reg));
3855 __ mul_d(double_scratch, double_scratch, double_scratch);
3856
3857 __ Branch(&while_true);
3858
3859 __ bind(&loop_end);
3860
3861 __ Branch(&done, ge, exponent, Operand(zero_reg));
3862 __ Move(double_scratch, 1.0);
3863 __ div_d(double_result, double_scratch, double_result);
3864 // Test whether result is zero. Bail out to check for subnormal result.
3865 // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
3866 __ BranchF(&done, NULL, ne, double_result, kDoubleRegZero);
3867
3868 // double_exponent may not contain the exponent value if the input was a
 3869 // smi. Set it to the exponent value before bailing out.
3870 __ mtc1(exponent, single_scratch);
3871 __ cvt_d_w(double_exponent, single_scratch);
3872
3873 // Returning or bailing out.
3874 Counters* counters = masm->isolate()->counters();
3875 if (exponent_type_ == ON_STACK) {
3876 // The arguments are still on the stack.
3877 __ bind(&call_runtime);
3878 __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
3879
3880 // The stub is called from non-optimized code, which expects the result
3881 // as heap number in exponent.
3882 __ bind(&done);
3883 __ AllocateHeapNumber(
3884 heapnumber, scratch, scratch2, heapnumbermap, &call_runtime);
3885 __ sdc1(double_result,
3886 FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
3887 ASSERT(heapnumber.is(v0));
3888 __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
3889 __ DropAndRet(2);
3890 } else {
3891 __ push(ra);
3892 {
3893 AllowExternalCallThatCantCauseGC scope(masm);
3894 __ PrepareCallCFunction(0, 2, scratch);
3895 __ SetCallCDoubleArguments(double_base, double_exponent);
3896 __ CallCFunction(
3897 ExternalReference::power_double_double_function(masm->isolate()),
3898 0, 2);
3899 }
3900 __ pop(ra);
3901 __ GetCFunctionDoubleResult(double_result);
3902
3903 __ bind(&done);
3904 __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
3905 __ Ret();
3906 }
karlklose@chromium.org83a47282011-05-11 11:54:09 +00003907}
3908
3909
lrn@chromium.org7516f052011-03-30 08:52:27 +00003910bool CEntryStub::NeedsImmovableCode() {
3911 return true;
3912}
3913
3914
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00003915bool CEntryStub::IsPregenerated() {
3916 return (!save_doubles_ || ISOLATE->fp_stubs_generated()) &&
3917 result_size_ == 1;
3918}
3919
3920
3921void CodeStub::GenerateStubsAheadOfTime() {
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00003922 CEntryStub::GenerateAheadOfTime();
3923 WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime();
3924 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime();
3925 RecordWriteStub::GenerateFixedRegStubsAheadOfTime();
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00003926}
3927
3928
3929void CodeStub::GenerateFPStubs() {
jkummerow@chromium.org59297c72013-01-09 16:32:23 +00003930 SaveFPRegsMode mode = CpuFeatures::IsSupported(FPU)
3931 ? kSaveFPRegs
3932 : kDontSaveFPRegs;
3933 CEntryStub save_doubles(1, mode);
3934 StoreBufferOverflowStub stub(mode);
 3935 // These stubs might already be in the snapshot; detect that and don't
 3936 // regenerate them, since regenerating would leave the code stub
 3937 // initialization state inconsistent.
3938 Code* save_doubles_code = NULL;
3939 Code* store_buffer_overflow_code = NULL;
3940 if (!save_doubles.FindCodeInCache(&save_doubles_code, ISOLATE)) {
3941 if (CpuFeatures::IsSupported(FPU)) {
3942 CpuFeatures::Scope scope2(FPU);
3943 save_doubles_code = *save_doubles.GetCode();
3944 store_buffer_overflow_code = *stub.GetCode();
3945 } else {
3946 save_doubles_code = *save_doubles.GetCode();
3947 store_buffer_overflow_code = *stub.GetCode();
3948 }
3949 save_doubles_code->set_is_pregenerated(true);
3950 store_buffer_overflow_code->set_is_pregenerated(true);
3951 }
3952 ISOLATE->set_fp_stubs_generated(true);
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00003953}
3954
3955
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00003956void CEntryStub::GenerateAheadOfTime() {
3957 CEntryStub stub(1, kDontSaveFPRegs);
3958 Handle<Code> code = stub.GetCode();
3959 code->set_is_pregenerated(true);
3960}
3961
3962
yangguo@chromium.org46a2a512013-01-18 16:29:40 +00003963static void JumpIfOOM(MacroAssembler* masm,
3964 Register value,
3965 Register scratch,
3966 Label* oom_label) {
3967 STATIC_ASSERT(Failure::OUT_OF_MEMORY_EXCEPTION == 3);
3968 STATIC_ASSERT(kFailureTag == 3);
3969 __ andi(scratch, value, 0xf);
3970 __ Branch(oom_label, eq, scratch, Operand(0xf));
3971}
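// The 0xf test above relies on the failure encoding: assuming a 2-bit failure
// tag (kFailureTagSize == 2, not checked by the STATIC_ASSERTs above), an
// out-of-memory failure has low nibble
// (OUT_OF_MEMORY_EXCEPTION << kFailureTagSize) | kFailureTag == (3 << 2) | 3
// == 0xf, so masking with 0xf and comparing against 0xf identifies it.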
3972
3973
lrn@chromium.org7516f052011-03-30 08:52:27 +00003974void CEntryStub::GenerateCore(MacroAssembler* masm,
3975 Label* throw_normal_exception,
3976 Label* throw_termination_exception,
3977 Label* throw_out_of_memory_exception,
3978 bool do_gc,
3979 bool always_allocate) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003980 // v0: result parameter for PerformGC, if any
3981 // s0: number of arguments including receiver (C callee-saved)
3982 // s1: pointer to the first argument (C callee-saved)
3983 // s2: pointer to builtin function (C callee-saved)
3984
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00003985 Isolate* isolate = masm->isolate();
3986
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003987 if (do_gc) {
3988 // Move result passed in v0 into a0 to call PerformGC.
3989 __ mov(a0, v0);
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00003990 __ PrepareCallCFunction(1, 0, a1);
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00003991 __ CallCFunction(ExternalReference::perform_gc_function(isolate), 1, 0);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003992 }
3993
3994 ExternalReference scope_depth =
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00003995 ExternalReference::heap_always_allocate_scope_depth(isolate);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00003996 if (always_allocate) {
3997 __ li(a0, Operand(scope_depth));
3998 __ lw(a1, MemOperand(a0));
3999 __ Addu(a1, a1, Operand(1));
4000 __ sw(a1, MemOperand(a0));
4001 }
4002
ulan@chromium.org6ff65142012-03-21 09:52:17 +00004003 // Prepare arguments for C routine.
4004 // a0 = argc
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004005 __ mov(a0, s0);
ulan@chromium.org6ff65142012-03-21 09:52:17 +00004006 // a1 = argv (set in the delay slot after find_ra below).
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004007
4008 // We are calling compiled C/C++ code. a0 and a1 hold our two arguments. We
4009 // also need to reserve the 4 argument slots on the stack.
4010
4011 __ AssertStackIsAligned();
4012
4013 __ li(a2, Operand(ExternalReference::isolate_address()));
4014
lrn@chromium.orgac2828d2011-06-23 06:29:21 +00004015 // To let the GC traverse the return address of the exit frames, we need to
4016 // know where the return address is. The CEntryStub is unmovable, so
4017 // we can store the address on the stack to be able to find it again and
4018 // we never have to restore it, because it will not change.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004019 { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm);
4020 // This branch-and-link sequence is needed to find the current PC on mips,
4021 // saved to the ra register.
4022 // Use masm-> here instead of the double-underscore macro since extra
4023 // coverage code can interfere with the proper calculation of ra.
4024 Label find_ra;
4025 masm->bal(&find_ra); // bal exposes branch delay slot.
ulan@chromium.org6ff65142012-03-21 09:52:17 +00004026 masm->mov(a1, s1);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004027 masm->bind(&find_ra);
4028
4029 // Adjust the value in ra to point to the correct return location, 2nd
4030 // instruction past the real call into C code (the jalr(t9)), and push it.
4031 // This is the return address of the exit frame.
ulan@chromium.org6ff65142012-03-21 09:52:17 +00004032 const int kNumInstructionsToJump = 5;
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004033 masm->Addu(ra, ra, kNumInstructionsToJump * kPointerSize);
4034 masm->sw(ra, MemOperand(sp)); // This spot was reserved in EnterExitFrame.
ulan@chromium.org6ff65142012-03-21 09:52:17 +00004035 // Stack space reservation moved to the branch delay slot below.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004036 // Stack is still aligned.
4037
4038 // Call the C routine.
4039 masm->mov(t9, s2); // Function pointer to t9 to conform to ABI for PIC.
4040 masm->jalr(t9);
ulan@chromium.org6ff65142012-03-21 09:52:17 +00004041 // Set up sp in the delay slot.
4042 masm->addiu(sp, sp, -kCArgsSlotsSize);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004043 // Make sure the stored 'ra' points to this position.
4044 ASSERT_EQ(kNumInstructionsToJump,
4045 masm->InstructionsGeneratedSince(&find_ra));
4046 }
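  // Why kNumInstructionsToJump is 5: bal sets ra to the address of find_ra
  // (the instruction after its delay slot), and exactly five instructions are
  // emitted from find_ra up to and including the jalr's delay slot (Addu, sw,
  // mov, jalr, addiu). ra + 5 * kPointerSize is therefore the instruction the
  // C call returns to, which is what gets stored in the exit frame slot.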
4047
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004048 if (always_allocate) {
4049 // It's okay to clobber a2 and a3 here. v0 & v1 contain result.
4050 __ li(a2, Operand(scope_depth));
4051 __ lw(a3, MemOperand(a2));
4052 __ Subu(a3, a3, Operand(1));
4053 __ sw(a3, MemOperand(a2));
4054 }
4055
4056 // Check for failure result.
4057 Label failure_returned;
4058 STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
4059 __ addiu(a2, v0, 1);
4060 __ andi(t0, a2, kFailureTagMask);
ulan@chromium.org6ff65142012-03-21 09:52:17 +00004061 __ Branch(USE_DELAY_SLOT, &failure_returned, eq, t0, Operand(zero_reg));
4062 // Restore stack (remove arg slots) in branch delay slot.
4063 __ addiu(sp, sp, kCArgsSlotsSize);
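  // The failure check above exploits the tagging scheme (kSmiTag == 0,
  // kHeapObjectTag == 1, kFailureTag == 3 in this V8 version): only failure
  // pointers have both low bits set, so only for them does (v0 + 1) mask to
  // zero under kFailureTagMask. E.g. 0x...f3 + 1 == 0x...f4 and 0xf4 & 3 == 0.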
4064
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004065
4066 // Exit C frame and return.
4067 // v0:v1: result
4068 // sp: stack pointer
4069 // fp: frame pointer
ulan@chromium.org6ff65142012-03-21 09:52:17 +00004070 __ LeaveExitFrame(save_doubles_, s0, true);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004071
4072 // Check if we should retry or throw exception.
4073 Label retry;
4074 __ bind(&failure_returned);
4075 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
4076 __ andi(t0, v0, ((1 << kFailureTypeTagSize) - 1) << kFailureTagSize);
4077 __ Branch(&retry, eq, t0, Operand(zero_reg));
4078
4079 // Special handling of out of memory exceptions.
yangguo@chromium.org46a2a512013-01-18 16:29:40 +00004080 JumpIfOOM(masm, v0, t0, throw_out_of_memory_exception);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004081
4082 // Retrieve the pending exception and clear the variable.
danno@chromium.org88aa0582012-03-23 15:11:57 +00004083 __ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
kmillikin@chromium.org83e16822011-09-13 08:21:47 +00004084 __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00004085 isolate)));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004086 __ lw(v0, MemOperand(t0));
4087 __ sw(a3, MemOperand(t0));
4088
4089 // Special handling of termination exceptions which are uncatchable
4090 // by javascript code.
danno@chromium.org88aa0582012-03-23 15:11:57 +00004091 __ LoadRoot(t0, Heap::kTerminationExceptionRootIndex);
4092 __ Branch(throw_termination_exception, eq, v0, Operand(t0));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004093
4094 // Handle normal exception.
4095 __ jmp(throw_normal_exception);
4096
4097 __ bind(&retry);
4098 // Last failure (v0) will be moved to (a0) for parameter when retrying.
lrn@chromium.org7516f052011-03-30 08:52:27 +00004099}
4100
4101
4102void CEntryStub::Generate(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004103 // Called from JavaScript; parameters are on stack as if calling JS function
ulan@chromium.org6ff65142012-03-21 09:52:17 +00004104 // s0: number of arguments including receiver
4105 // s1: size of arguments excluding receiver
4106 // s2: pointer to builtin function
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004107 // fp: frame pointer (restored after C call)
4108 // sp: stack pointer (restored as callee's sp after C call)
4109 // cp: current context (C callee-saved)
4110
4111 // NOTE: Invocations of builtins may return failure objects
4112 // instead of a proper result. The builtin entry handles
4113 // this by performing a garbage collection and retrying the
4114 // builtin once.
4115
ulan@chromium.org6ff65142012-03-21 09:52:17 +00004116 // NOTE: s0-s2 hold the arguments of this function instead of a0-a2.
4117 // The reason for this is that these arguments would need to be saved anyway
4118 // so it's faster to set them up directly.
4119 // See MacroAssembler::PrepareCEntryArgs and PrepareCEntryFunction.
4120
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004121 // Compute the argv pointer in a callee-saved register.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004122 __ Addu(s1, sp, s1);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004123
4124 // Enter the exit frame that transitions from JavaScript to C++.
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00004125 FrameScope scope(masm, StackFrame::MANUAL);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004126 __ EnterExitFrame(save_doubles_);
4127
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004128 // s0: number of arguments (C callee-saved)
4129 // s1: pointer to first argument (C callee-saved)
4130 // s2: pointer to builtin function (C callee-saved)
4131
4132 Label throw_normal_exception;
4133 Label throw_termination_exception;
4134 Label throw_out_of_memory_exception;
4135
4136 // Call into the runtime system.
4137 GenerateCore(masm,
4138 &throw_normal_exception,
4139 &throw_termination_exception,
4140 &throw_out_of_memory_exception,
4141 false,
4142 false);
4143
4144 // Do space-specific GC and retry runtime call.
4145 GenerateCore(masm,
4146 &throw_normal_exception,
4147 &throw_termination_exception,
4148 &throw_out_of_memory_exception,
4149 true,
4150 false);
4151
4152 // Do full GC and retry runtime call one final time.
4153 Failure* failure = Failure::InternalError();
4154 __ li(v0, Operand(reinterpret_cast<int32_t>(failure)));
4155 GenerateCore(masm,
4156 &throw_normal_exception,
4157 &throw_termination_exception,
4158 &throw_out_of_memory_exception,
4159 true,
4160 true);
4161
4162 __ bind(&throw_out_of_memory_exception);
ulan@chromium.org65a89c22012-02-14 11:46:07 +00004163 // Set external caught exception to false.
4164 Isolate* isolate = masm->isolate();
4165 ExternalReference external_caught(Isolate::kExternalCaughtExceptionAddress,
4166 isolate);
jkummerow@chromium.org59297c72013-01-09 16:32:23 +00004167 __ li(a0, Operand(false, RelocInfo::NONE32));
ulan@chromium.org65a89c22012-02-14 11:46:07 +00004168 __ li(a2, Operand(external_caught));
4169 __ sw(a0, MemOperand(a2));
4170
4171 // Set pending exception and v0 to out of memory exception.
yangguo@chromium.org46a2a512013-01-18 16:29:40 +00004172 Label already_have_failure;
4173 JumpIfOOM(masm, v0, t0, &already_have_failure);
4174 Failure* out_of_memory = Failure::OutOfMemoryException(0x1);
ulan@chromium.org65a89c22012-02-14 11:46:07 +00004175 __ li(v0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
yangguo@chromium.org46a2a512013-01-18 16:29:40 +00004176 __ bind(&already_have_failure);
ulan@chromium.org65a89c22012-02-14 11:46:07 +00004177 __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
4178 isolate)));
4179 __ sw(v0, MemOperand(a2));
4180 // Fall through to the next label.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004181
4182 __ bind(&throw_termination_exception);
ulan@chromium.org65a89c22012-02-14 11:46:07 +00004183 __ ThrowUncatchable(v0);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004184
4185 __ bind(&throw_normal_exception);
ulan@chromium.org65a89c22012-02-14 11:46:07 +00004186 __ Throw(v0);
lrn@chromium.org7516f052011-03-30 08:52:27 +00004187}
4188
4189
4190void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
mstarzinger@chromium.orgf8c6bd52011-11-23 12:13:52 +00004191 Label invoke, handler_entry, exit;
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00004192 Isolate* isolate = masm->isolate();
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004193
4194 // Registers:
4195 // a0: entry address
4196 // a1: function
ulan@chromium.org2efb9002012-01-19 15:36:35 +00004197 // a2: receiver
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004198 // a3: argc
4199 //
4200 // Stack:
4201 // 4 args slots
4202 // args
4203
4204 // Save callee saved registers on the stack.
ricow@chromium.org4668a2c2011-08-29 10:41:00 +00004205 __ MultiPush(kCalleeSaved | ra.bit());
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004206
fschneider@chromium.org1805e212011-09-05 10:49:12 +00004207 if (CpuFeatures::IsSupported(FPU)) {
4208 CpuFeatures::Scope scope(FPU);
4209 // Save callee-saved FPU registers.
4210 __ MultiPushFPU(kCalleeSavedFPU);
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00004211 // Set up the reserved register for 0.0.
4212 __ Move(kDoubleRegZero, 0.0);
fschneider@chromium.org1805e212011-09-05 10:49:12 +00004213 }
 4214
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004216 // Load argv in s0 register.
fschneider@chromium.org1805e212011-09-05 10:49:12 +00004217 int offset_to_argv = (kNumCalleeSaved + 1) * kPointerSize;
4218 if (CpuFeatures::IsSupported(FPU)) {
4219 offset_to_argv += kNumCalleeSavedFPU * kDoubleSize;
4220 }
4221
danno@chromium.org88aa0582012-03-23 15:11:57 +00004222 __ InitializeRootRegister();
fschneider@chromium.org1805e212011-09-05 10:49:12 +00004223 __ lw(s0, MemOperand(sp, offset_to_argv + kCArgsSlotsSize));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004224
4225 // We build an EntryFrame.
4226 __ li(t3, Operand(-1)); // Push a bad frame pointer to fail if it is used.
4227 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
4228 __ li(t2, Operand(Smi::FromInt(marker)));
4229 __ li(t1, Operand(Smi::FromInt(marker)));
kmillikin@chromium.org83e16822011-09-13 08:21:47 +00004230 __ li(t0, Operand(ExternalReference(Isolate::kCEntryFPAddress,
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00004231 isolate)));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004232 __ lw(t0, MemOperand(t0));
4233 __ Push(t3, t2, t1, t0);
erik.corry@gmail.comf2038fb2012-01-16 11:42:08 +00004234 // Set up frame pointer for the frame to be pushed.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004235 __ addiu(fp, sp, -EntryFrameConstants::kCallerFPOffset);
4236
4237 // Registers:
4238 // a0: entry_address
4239 // a1: function
ulan@chromium.org2efb9002012-01-19 15:36:35 +00004240 // a2: receiver_pointer
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004241 // a3: argc
4242 // s0: argv
4243 //
4244 // Stack:
4245 // caller fp |
4246 // function slot | entry frame
4247 // context slot |
4248 // bad fp (0xff...f) |
4249 // callee saved registers + ra
4250 // 4 args slots
4251 // args
4252
whesse@chromium.org030d38e2011-07-13 13:23:34 +00004253 // If this is the outermost JS call, set js_entry_sp value.
4254 Label non_outermost_js;
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00004255 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
whesse@chromium.org030d38e2011-07-13 13:23:34 +00004256 __ li(t1, Operand(ExternalReference(js_entry_sp)));
4257 __ lw(t2, MemOperand(t1));
4258 __ Branch(&non_outermost_js, ne, t2, Operand(zero_reg));
4259 __ sw(fp, MemOperand(t1));
4260 __ li(t0, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
4261 Label cont;
4262 __ b(&cont);
4263 __ nop(); // Branch delay slot nop.
4264 __ bind(&non_outermost_js);
4265 __ li(t0, Operand(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
4266 __ bind(&cont);
4267 __ push(t0);
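  // Only the outermost JS entry records its fp in js_entry_sp; nested entries
  // push INNER_JSENTRY_FRAME instead. The marker pushed here is popped on the
  // exit path below, where js_entry_sp is cleared again only when the
  // outermost frame is being left.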
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004268
mstarzinger@chromium.orgf8c6bd52011-11-23 12:13:52 +00004269 // Jump to a faked try block that does the invoke, with a faked catch
4270 // block that sets the pending exception.
4271 __ jmp(&invoke);
4272 __ bind(&handler_entry);
4273 handler_offset_ = handler_entry.pos();
4274 // Caught exception: Store result (exception) in the pending exception
4275 // field in the JSEnv and return a failure sentinel. Coming in here the
4276 // fp will be invalid because the PushTryHandler below sets it to 0 to
4277 // signal the existence of the JSEntry frame.
kmillikin@chromium.org83e16822011-09-13 08:21:47 +00004278 __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00004279 isolate)));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004280 __ sw(v0, MemOperand(t0)); // We come back from 'invoke'. result is in v0.
4281 __ li(v0, Operand(reinterpret_cast<int32_t>(Failure::Exception())));
4282 __ b(&exit); // b exposes branch delay slot.
4283 __ nop(); // Branch delay slot nop.
4284
mstarzinger@chromium.orgf8c6bd52011-11-23 12:13:52 +00004285 // Invoke: Link this frame into the handler chain. There's only one
4286 // handler block in this code object, so its index is 0.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004287 __ bind(&invoke);
yangguo@chromium.org78d1ad42012-02-09 13:53:47 +00004288 __ PushTryHandler(StackHandler::JS_ENTRY, 0);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004289 // If an exception not caught by another handler occurs, this handler
4290 // returns control to the code after the bal(&invoke) above, which
4291 // restores all kCalleeSaved registers (including cp and fp) to their
4292 // saved values before returning a failure to C.
4293
4294 // Clear any pending exceptions.
danno@chromium.org88aa0582012-03-23 15:11:57 +00004295 __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
kmillikin@chromium.org83e16822011-09-13 08:21:47 +00004296 __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00004297 isolate)));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004298 __ sw(t1, MemOperand(t0));
4299
4300 // Invoke the function by calling through JS entry trampoline builtin.
4301 // Notice that we cannot store a reference to the trampoline code directly in
4302 // this stub, because runtime stubs are not traversed when doing GC.
4303
4304 // Registers:
4305 // a0: entry_address
4306 // a1: function
ulan@chromium.org2efb9002012-01-19 15:36:35 +00004307 // a2: receiver_pointer
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004308 // a3: argc
4309 // s0: argv
4310 //
4311 // Stack:
4312 // handler frame
4313 // entry frame
4314 // callee saved registers + ra
4315 // 4 args slots
4316 // args
4317
4318 if (is_construct) {
4319 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00004320 isolate);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004321 __ li(t0, Operand(construct_entry));
4322 } else {
4323 ExternalReference entry(Builtins::kJSEntryTrampoline, masm->isolate());
4324 __ li(t0, Operand(entry));
4325 }
4326 __ lw(t9, MemOperand(t0)); // Deref address.
4327
4328 // Call JSEntryTrampoline.
4329 __ addiu(t9, t9, Code::kHeaderSize - kHeapObjectTag);
4330 __ Call(t9);
4331
danno@chromium.org40cb8782011-05-25 07:58:50 +00004332 // Unlink this frame from the handler chain.
4333 __ PopTryHandler();
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004334
danno@chromium.org40cb8782011-05-25 07:58:50 +00004335 __ bind(&exit); // v0 holds result
whesse@chromium.org030d38e2011-07-13 13:23:34 +00004336 // Check if the current stack frame is marked as the outermost JS frame.
4337 Label non_outermost_js_2;
4338 __ pop(t1);
danno@chromium.org88aa0582012-03-23 15:11:57 +00004339 __ Branch(&non_outermost_js_2,
4340 ne,
4341 t1,
whesse@chromium.org030d38e2011-07-13 13:23:34 +00004342 Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
4343 __ li(t1, Operand(ExternalReference(js_entry_sp)));
4344 __ sw(zero_reg, MemOperand(t1));
4345 __ bind(&non_outermost_js_2);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004346
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004347 // Restore the top frame descriptors from the stack.
4348 __ pop(t1);
kmillikin@chromium.org83e16822011-09-13 08:21:47 +00004349 __ li(t0, Operand(ExternalReference(Isolate::kCEntryFPAddress,
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00004350 isolate)));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004351 __ sw(t1, MemOperand(t0));
4352
4353 // Reset the stack to the callee saved registers.
4354 __ addiu(sp, sp, -EntryFrameConstants::kCallerFPOffset);
4355
fschneider@chromium.org1805e212011-09-05 10:49:12 +00004356 if (CpuFeatures::IsSupported(FPU)) {
4357 CpuFeatures::Scope scope(FPU);
4358 // Restore callee-saved fpu registers.
4359 __ MultiPopFPU(kCalleeSavedFPU);
4360 }
4361
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004362 // Restore callee saved registers from the stack.
ricow@chromium.org4668a2c2011-08-29 10:41:00 +00004363 __ MultiPop(kCalleeSaved | ra.bit());
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004364 // Return.
4365 __ Jump(ra);
lrn@chromium.org7516f052011-03-30 08:52:27 +00004366}
4367
4368
danno@chromium.org40cb8782011-05-25 07:58:50 +00004369// Uses registers a0 to t0.
4370// Expected input (depending on whether args are in registers or on the stack):
4371// * object: a0 or at sp + 1 * kPointerSize.
4372// * function: a1 or at sp.
4373//
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00004374// An inlined call site may have been generated before calling this stub.
4375// In this case the offset to the inline site to patch is passed on the stack,
4376// in the safepoint slot for register t0.
lrn@chromium.org7516f052011-03-30 08:52:27 +00004377void InstanceofStub::Generate(MacroAssembler* masm) {
danno@chromium.org40cb8782011-05-25 07:58:50 +00004378 // Call site inlining and patching implies arguments in registers.
4379 ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck());
4380 // ReturnTrueFalse is only implemented for inlined call sites.
4381 ASSERT(!ReturnTrueFalseObject() || HasCallSiteInlineCheck());
4382
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004383 // Fixed register usage throughout the stub:
4384 const Register object = a0; // Object (lhs).
danno@chromium.org40cb8782011-05-25 07:58:50 +00004385 Register map = a3; // Map of the object.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004386 const Register function = a1; // Function (rhs).
4387 const Register prototype = t0; // Prototype of the function.
danno@chromium.org40cb8782011-05-25 07:58:50 +00004388 const Register inline_site = t5;
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004389 const Register scratch = a2;
danno@chromium.org40cb8782011-05-25 07:58:50 +00004390
jkummerow@chromium.org05ed9dd2012-01-23 14:42:48 +00004391 const int32_t kDeltaToLoadBoolResult = 5 * kPointerSize;
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00004392
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004393 Label slow, loop, is_instance, is_not_instance, not_js_object;
danno@chromium.org40cb8782011-05-25 07:58:50 +00004394
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004395 if (!HasArgsInRegisters()) {
4396 __ lw(object, MemOperand(sp, 1 * kPointerSize));
4397 __ lw(function, MemOperand(sp, 0));
4398 }
4399
4400 // Check that the left hand is a JS object and load map.
4401 __ JumpIfSmi(object, &not_js_object);
4402 __ IsObjectJSObjectType(object, map, scratch, &not_js_object);
4403
danno@chromium.org40cb8782011-05-25 07:58:50 +00004404 // If there is a call site cache don't look in the global cache, but do the
4405 // real lookup and update the call site cache.
4406 if (!HasCallSiteInlineCheck()) {
4407 Label miss;
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00004408 __ LoadRoot(at, Heap::kInstanceofCacheFunctionRootIndex);
4409 __ Branch(&miss, ne, function, Operand(at));
4410 __ LoadRoot(at, Heap::kInstanceofCacheMapRootIndex);
4411 __ Branch(&miss, ne, map, Operand(at));
danno@chromium.org40cb8782011-05-25 07:58:50 +00004412 __ LoadRoot(v0, Heap::kInstanceofCacheAnswerRootIndex);
4413 __ DropAndRet(HasArgsInRegisters() ? 0 : 2);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004414
danno@chromium.org40cb8782011-05-25 07:58:50 +00004415 __ bind(&miss);
4416 }
4417
4418 // Get the prototype of the function.
erik.corry@gmail.com394dbcf2011-10-27 07:38:48 +00004419 __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004420
4421 // Check that the function prototype is a JS object.
4422 __ JumpIfSmi(prototype, &slow);
4423 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
4424
danno@chromium.org40cb8782011-05-25 07:58:50 +00004425 // Update the global instanceof or call site inlined cache with the current
4426 // map and function. The cached answer will be set when it is known below.
4427 if (!HasCallSiteInlineCheck()) {
4428 __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
4429 __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
4430 } else {
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00004431 ASSERT(HasArgsInRegisters());
4432 // Patch the (relocated) inlined map check.
4433
4434 // The offset was stored in t0 safepoint slot.
jkummerow@chromium.org05ed9dd2012-01-23 14:42:48 +00004435 // (See LCodeGen::DoDeferredLInstanceOfKnownGlobal).
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00004436 __ LoadFromSafepointRegisterSlot(scratch, t0);
4437 __ Subu(inline_site, ra, scratch);
jkummerow@chromium.org05ed9dd2012-01-23 14:42:48 +00004438 // Get the map location in scratch and patch it.
4439 __ GetRelocatedValue(inline_site, scratch, v1); // v1 used as scratch.
4440 __ sw(map, FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
danno@chromium.org40cb8782011-05-25 07:58:50 +00004441 }
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004442
4443 // Register mapping: a3 is object map and t0 is function prototype.
4444 // Get prototype of object into a2.
4445 __ lw(scratch, FieldMemOperand(map, Map::kPrototypeOffset));
4446
danno@chromium.org40cb8782011-05-25 07:58:50 +00004447 // We don't need map any more. Use it as a scratch register.
4448 Register scratch2 = map;
4449 map = no_reg;
4450
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004451 // Loop through the prototype chain looking for the function prototype.
danno@chromium.org40cb8782011-05-25 07:58:50 +00004452 __ LoadRoot(scratch2, Heap::kNullValueRootIndex);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004453 __ bind(&loop);
4454 __ Branch(&is_instance, eq, scratch, Operand(prototype));
danno@chromium.org40cb8782011-05-25 07:58:50 +00004455 __ Branch(&is_not_instance, eq, scratch, Operand(scratch2));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004456 __ lw(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
4457 __ lw(scratch, FieldMemOperand(scratch, Map::kPrototypeOffset));
4458 __ Branch(&loop);
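  // The loop above walks the prototype chain, roughly the ES5 [[HasInstance]]
  // algorithm (15.3.5.3). A C++-style sketch of the intent (illustrative
  // only; names chosen for clarity):
  //   Object* p = object->map()->prototype();
  //   while (true) {
  //     if (p == function_prototype) { result = true; break; } // is_instance
  //     if (p == null_value) { result = false; break; }    // is_not_instance
  //     p = HeapObject::cast(p)->map()->prototype();
  //   }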
4459
4460 __ bind(&is_instance);
4461 ASSERT(Smi::FromInt(0) == 0);
danno@chromium.org40cb8782011-05-25 07:58:50 +00004462 if (!HasCallSiteInlineCheck()) {
4463 __ mov(v0, zero_reg);
4464 __ StoreRoot(v0, Heap::kInstanceofCacheAnswerRootIndex);
4465 } else {
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00004466 // Patch the call site to return true.
4467 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
4468 __ Addu(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));
4469 // Get the boolean result location in scratch and patch it.
4470 __ PatchRelocatedValue(inline_site, scratch, v0);
4471
4472 if (!ReturnTrueFalseObject()) {
4473 ASSERT_EQ(Smi::FromInt(0), 0);
4474 __ mov(v0, zero_reg);
4475 }
danno@chromium.org40cb8782011-05-25 07:58:50 +00004476 }
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004477 __ DropAndRet(HasArgsInRegisters() ? 0 : 2);
4478
4479 __ bind(&is_not_instance);
danno@chromium.org40cb8782011-05-25 07:58:50 +00004480 if (!HasCallSiteInlineCheck()) {
4481 __ li(v0, Operand(Smi::FromInt(1)));
4482 __ StoreRoot(v0, Heap::kInstanceofCacheAnswerRootIndex);
4483 } else {
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00004484 // Patch the call site to return false.
4485 __ LoadRoot(v0, Heap::kFalseValueRootIndex);
4486 __ Addu(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));
4487 // Get the boolean result location in scratch and patch it.
4488 __ PatchRelocatedValue(inline_site, scratch, v0);
4489
4490 if (!ReturnTrueFalseObject()) {
4491 __ li(v0, Operand(Smi::FromInt(1)));
4492 }
danno@chromium.org40cb8782011-05-25 07:58:50 +00004493 }
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00004494
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004495 __ DropAndRet(HasArgsInRegisters() ? 0 : 2);
4496
4497 Label object_not_null, object_not_null_or_smi;
4498 __ bind(&not_js_object);
4499 // Before null, smi and string value checks, check that the rhs is a function
4500 // as for a non-function rhs an exception needs to be thrown.
4501 __ JumpIfSmi(function, &slow);
danno@chromium.org40cb8782011-05-25 07:58:50 +00004502 __ GetObjectType(function, scratch2, scratch);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004503 __ Branch(&slow, ne, scratch, Operand(JS_FUNCTION_TYPE));
4504
4505 // Null is not instance of anything.
danno@chromium.org88aa0582012-03-23 15:11:57 +00004506 __ Branch(&object_not_null,
4507 ne,
4508 scratch,
4509 Operand(masm->isolate()->factory()->null_value()));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004510 __ li(v0, Operand(Smi::FromInt(1)));
4511 __ DropAndRet(HasArgsInRegisters() ? 0 : 2);
4512
4513 __ bind(&object_not_null);
4514 // Smi values are not instances of anything.
4515 __ JumpIfNotSmi(object, &object_not_null_or_smi);
4516 __ li(v0, Operand(Smi::FromInt(1)));
4517 __ DropAndRet(HasArgsInRegisters() ? 0 : 2);
4518
4519 __ bind(&object_not_null_or_smi);
4520 // String values are not instances of anything.
4521 __ IsObjectJSStringType(object, scratch, &slow);
4522 __ li(v0, Operand(Smi::FromInt(1)));
4523 __ DropAndRet(HasArgsInRegisters() ? 0 : 2);
4524
4525 // Slow-case. Tail call builtin.
4526 __ bind(&slow);
danno@chromium.org40cb8782011-05-25 07:58:50 +00004527 if (!ReturnTrueFalseObject()) {
4528 if (HasArgsInRegisters()) {
4529 __ Push(a0, a1);
4530 }
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004531 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
danno@chromium.org40cb8782011-05-25 07:58:50 +00004532 } else {
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00004533 {
4534 FrameScope scope(masm, StackFrame::INTERNAL);
4535 __ Push(a0, a1);
4536 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
4537 }
danno@chromium.org40cb8782011-05-25 07:58:50 +00004538 __ mov(a0, v0);
4539 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
4540 __ DropAndRet(HasArgsInRegisters() ? 0 : 2, eq, a0, Operand(zero_reg));
4541 __ LoadRoot(v0, Heap::kFalseValueRootIndex);
4542 __ DropAndRet(HasArgsInRegisters() ? 0 : 2);
4543 }
lrn@chromium.org7516f052011-03-30 08:52:27 +00004544}
4545
4546
mvstanton@chromium.org6bec0092013-01-23 13:46:53 +00004547void ArrayLengthStub::Generate(MacroAssembler* masm) {
4548 Label miss;
4549 Register receiver;
4550 if (kind() == Code::KEYED_LOAD_IC) {
4551 // ----------- S t a t e -------------
4552 // -- ra : return address
4553 // -- a0 : key
4554 // -- a1 : receiver
4555 // -----------------------------------
4556 __ Branch(&miss, ne, a0,
4557 Operand(masm->isolate()->factory()->length_symbol()));
4558 receiver = a1;
4559 } else {
4560 ASSERT(kind() == Code::LOAD_IC);
4561 // ----------- S t a t e -------------
4562 // -- a2 : name
4563 // -- ra : return address
4564 // -- a0 : receiver
4565 // -- sp[0] : receiver
4566 // -----------------------------------
4567 receiver = a0;
4568 }
4569
4570 StubCompiler::GenerateLoadArrayLength(masm, receiver, a3, &miss);
4571 __ bind(&miss);
4572 StubCompiler::GenerateLoadMiss(masm, kind());
4573}
4574
4575
4576void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
4577 Label miss;
4578 Register receiver;
4579 if (kind() == Code::KEYED_LOAD_IC) {
4580 // ----------- S t a t e -------------
4581 // -- ra : return address
4582 // -- a0 : key
4583 // -- a1 : receiver
4584 // -----------------------------------
4585 __ Branch(&miss, ne, a0,
4586 Operand(masm->isolate()->factory()->prototype_symbol()));
4587 receiver = a1;
4588 } else {
4589 ASSERT(kind() == Code::LOAD_IC);
4590 // ----------- S t a t e -------------
4591 // -- a2 : name
4592 // -- ra : return address
4593 // -- a0 : receiver
4594 // -- sp[0] : receiver
4595 // -----------------------------------
4596 receiver = a0;
4597 }
4598
4599 StubCompiler::GenerateLoadFunctionPrototype(masm, receiver, a3, t0, &miss);
4600 __ bind(&miss);
4601 StubCompiler::GenerateLoadMiss(masm, kind());
4602}
4603
4604
4605void StringLengthStub::Generate(MacroAssembler* masm) {
4606 Label miss;
4607 Register receiver;
4608 if (kind() == Code::KEYED_LOAD_IC) {
4609 // ----------- S t a t e -------------
4610 // -- ra : return address
4611 // -- a0 : key
4612 // -- a1 : receiver
4613 // -----------------------------------
4614 __ Branch(&miss, ne, a0,
4615 Operand(masm->isolate()->factory()->length_symbol()));
4616 receiver = a1;
4617 } else {
4618 ASSERT(kind() == Code::LOAD_IC);
4619 // ----------- S t a t e -------------
4620 // -- a2 : name
4621 // -- ra : return address
4622 // -- a0 : receiver
4623 // -- sp[0] : receiver
4624 // -----------------------------------
4625 receiver = a0;
4626 }
4627
4628 StubCompiler::GenerateLoadStringLength(masm, receiver, a3, t0, &miss,
4629 support_wrapper_);
4630
4631 __ bind(&miss);
4632 StubCompiler::GenerateLoadMiss(masm, kind());
4633}
4634
4635
mstarzinger@chromium.org068ea0a2013-01-30 09:39:44 +00004636void StoreArrayLengthStub::Generate(MacroAssembler* masm) {
4637 // This accepts as a receiver anything JSArray::SetElementsLength accepts
4638 // (currently anything except for external arrays which means anything with
 4639 // elements of FixedArray type). Value must be a number; only smis, the
 4640 // most common case, are handled here, and everything else misses.
4641 Label miss;
4642
4643 Register receiver;
4644 Register value;
4645 if (kind() == Code::KEYED_STORE_IC) {
4646 // ----------- S t a t e -------------
4647 // -- ra : return address
4648 // -- a0 : value
4649 // -- a1 : key
4650 // -- a2 : receiver
4651 // -----------------------------------
4652 __ Branch(&miss, ne, a1,
4653 Operand(masm->isolate()->factory()->length_symbol()));
4654 receiver = a2;
4655 value = a0;
4656 } else {
4657 ASSERT(kind() == Code::STORE_IC);
4658 // ----------- S t a t e -------------
4659 // -- ra : return address
4660 // -- a0 : value
4661 // -- a1 : receiver
4662 // -- a2 : key
4663 // -----------------------------------
4664 receiver = a1;
4665 value = a0;
4666 }
4667 Register scratch = a3;
4668
4669 // Check that the receiver isn't a smi.
4670 __ JumpIfSmi(receiver, &miss);
4671
4672 // Check that the object is a JS array.
4673 __ GetObjectType(receiver, scratch, scratch);
4674 __ Branch(&miss, ne, scratch, Operand(JS_ARRAY_TYPE));
4675
4676 // Check that elements are FixedArray.
4677 // We rely on StoreIC_ArrayLength below to deal with all types of
4678 // fast elements (including COW).
4679 __ lw(scratch, FieldMemOperand(receiver, JSArray::kElementsOffset));
4680 __ GetObjectType(scratch, scratch, scratch);
4681 __ Branch(&miss, ne, scratch, Operand(FIXED_ARRAY_TYPE));
4682
4683 // Check that the array has fast properties, otherwise the length
4684 // property might have been redefined.
4685 __ lw(scratch, FieldMemOperand(receiver, JSArray::kPropertiesOffset));
4686 __ lw(scratch, FieldMemOperand(scratch, FixedArray::kMapOffset));
4687 __ LoadRoot(at, Heap::kHashTableMapRootIndex);
4688 __ Branch(&miss, eq, scratch, Operand(at));
4689
4690 // Check that value is a smi.
4691 __ JumpIfNotSmi(value, &miss);
4692
4693 // Prepare tail call to StoreIC_ArrayLength.
4694 __ Push(receiver, value);
4695
4696 ExternalReference ref =
4697 ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength), masm->isolate());
4698 __ TailCallExternalReference(ref, 2, 1);
4699
4700 __ bind(&miss);
4701
4702 StubCompiler::GenerateStoreMiss(masm, kind());
4703}
4704
4705
danno@chromium.org40cb8782011-05-25 07:58:50 +00004706Register InstanceofStub::left() { return a0; }
4707
4708
4709Register InstanceofStub::right() { return a1; }
4710
4711
lrn@chromium.org7516f052011-03-30 08:52:27 +00004712void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004713 // The displacement is the offset of the last parameter (if any)
4714 // relative to the frame pointer.
fschneider@chromium.org7d10be52012-04-10 12:30:14 +00004715 const int kDisplacement =
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004716 StandardFrameConstants::kCallerSPOffset - kPointerSize;
4717
 4718 // Check that the key is a smi.
4719 Label slow;
4720 __ JumpIfNotSmi(a1, &slow);
4721
4722 // Check if the calling frame is an arguments adaptor frame.
4723 Label adaptor;
4724 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4725 __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
4726 __ Branch(&adaptor,
4727 eq,
4728 a3,
4729 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4730
4731 // Check index (a1) against formal parameters count limit passed in
4732 // through register a0. Use unsigned comparison to get negative
4733 // check for free.
4734 __ Branch(&slow, hs, a1, Operand(a0));
4735
4736 // Read the argument from the stack and return it.
4737 __ subu(a3, a0, a1);
4738 __ sll(t3, a3, kPointerSizeLog2 - kSmiTagSize);
4739 __ Addu(a3, fp, Operand(t3));
4740 __ lw(v0, MemOperand(a3, kDisplacement));
4741 __ Ret();
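  // In effect, argument i of the caller is loaded from
  // caller_sp + (argc - 1 - i) * kPointerSize, with caller_sp ==
  // fp + StandardFrameConstants::kCallerSPOffset; the (a0 - a1) scaling plus
  // kDisplacement above encodes exactly that address.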
4742
4743 // Arguments adaptor case: Check index (a1) against actual arguments
4744 // limit found in the arguments adaptor frame. Use unsigned
4745 // comparison to get negative check for free.
4746 __ bind(&adaptor);
4747 __ lw(a0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
4748 __ Branch(&slow, Ugreater_equal, a1, Operand(a0));
4749
4750 // Read the argument from the adaptor frame and return it.
4751 __ subu(a3, a0, a1);
4752 __ sll(t3, a3, kPointerSizeLog2 - kSmiTagSize);
4753 __ Addu(a3, a2, Operand(t3));
4754 __ lw(v0, MemOperand(a3, kDisplacement));
4755 __ Ret();
4756
4757 // Slow-case: Handle non-smi or out-of-bounds access to arguments
4758 // by calling the runtime system.
4759 __ bind(&slow);
4760 __ push(a1);
4761 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
lrn@chromium.org7516f052011-03-30 08:52:27 +00004762}
4763
4764
lrn@chromium.orgac2828d2011-06-23 06:29:21 +00004765void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004766 // sp[0] : number of parameters
4767 // sp[4] : receiver displacement
4768 // sp[8] : function
lrn@chromium.orgac2828d2011-06-23 06:29:21 +00004769 // Check if the calling frame is an arguments adaptor frame.
4770 Label runtime;
4771 __ lw(a3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4772 __ lw(a2, MemOperand(a3, StandardFrameConstants::kContextOffset));
danno@chromium.org88aa0582012-03-23 15:11:57 +00004773 __ Branch(&runtime,
4774 ne,
4775 a2,
4776 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00004777
lrn@chromium.orgac2828d2011-06-23 06:29:21 +00004778 // Patch the arguments.length and the parameters pointer in the current frame.
4779 __ lw(a2, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset));
4780 __ sw(a2, MemOperand(sp, 0 * kPointerSize));
4781 __ sll(t3, a2, 1);
4782 __ Addu(a3, a3, Operand(t3));
4783 __ addiu(a3, a3, StandardFrameConstants::kCallerSPOffset);
4784 __ sw(a3, MemOperand(sp, 1 * kPointerSize));
4785
4786 __ bind(&runtime);
4787 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
4788}
4789
4790
4791void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
4792 // Stack layout:
4793 // sp[0] : number of parameters (tagged)
4794 // sp[4] : address of receiver argument
4795 // sp[8] : function
4796 // Registers used over whole function:
4797 // t2 : allocated object (tagged)
4798 // t5 : mapped parameter count (tagged)
4799
4800 __ lw(a1, MemOperand(sp, 0 * kPointerSize));
4801 // a1 = parameter count (tagged)
4802
4803 // Check if the calling frame is an arguments adaptor frame.
4804 Label runtime;
4805 Label adaptor_frame, try_allocate;
4806 __ lw(a3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4807 __ lw(a2, MemOperand(a3, StandardFrameConstants::kContextOffset));
danno@chromium.org88aa0582012-03-23 15:11:57 +00004808 __ Branch(&adaptor_frame,
4809 eq,
4810 a2,
lrn@chromium.orgac2828d2011-06-23 06:29:21 +00004811 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4812
4813 // No adaptor, parameter count = argument count.
4814 __ mov(a2, a1);
4815 __ b(&try_allocate);
4816 __ nop(); // Branch delay slot nop.
4817
4818 // We have an adaptor frame. Patch the parameters pointer.
4819 __ bind(&adaptor_frame);
4820 __ lw(a2, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset));
4821 __ sll(t6, a2, 1);
4822 __ Addu(a3, a3, Operand(t6));
4823 __ Addu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset));
4824 __ sw(a3, MemOperand(sp, 1 * kPointerSize));
4825
4826 // a1 = parameter count (tagged)
4827 // a2 = argument count (tagged)
4828 // Compute the mapped parameter count = min(a1, a2) in a1.
4829 Label skip_min;
4830 __ Branch(&skip_min, lt, a1, Operand(a2));
4831 __ mov(a1, a2);
4832 __ bind(&skip_min);
4833
4834 __ bind(&try_allocate);
4835
4836 // Compute the sizes of backing store, parameter map, and arguments object.
4837 // 1. Parameter map, has 2 extra words containing context and backing store.
4838 const int kParameterMapHeaderSize =
4839 FixedArray::kHeaderSize + 2 * kPointerSize;
4840 // If there are no mapped parameters, we do not need the parameter_map.
4841 Label param_map_size;
4842 ASSERT_EQ(0, Smi::FromInt(0));
4843 __ Branch(USE_DELAY_SLOT, &param_map_size, eq, a1, Operand(zero_reg));
4844 __ mov(t5, zero_reg); // In delay slot: param map size = 0 when a1 == 0.
4845 __ sll(t5, a1, 1);
4846 __ addiu(t5, t5, kParameterMapHeaderSize);
4847 __ bind(&param_map_size);
4848
4849 // 2. Backing store.
4850 __ sll(t6, a2, 1);
4851 __ Addu(t5, t5, Operand(t6));
4852 __ Addu(t5, t5, Operand(FixedArray::kHeaderSize));
4853
4854 // 3. Arguments object.
4855 __ Addu(t5, t5, Operand(Heap::kArgumentsObjectSize));
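  // Worked size example (illustrative): with 2 mapped parameters and 3
  // arguments, t5 ends up as
  //   (2 * kPointerSize + kParameterMapHeaderSize)
  //   + (3 * kPointerSize + FixedArray::kHeaderSize)
  //   + Heap::kArgumentsObjectSize
  // bytes, allocated below as one contiguous new-space object.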
4856
4857 // Do the allocation of all three objects in one go.
4858 __ AllocateInNewSpace(t5, v0, a3, t0, &runtime, TAG_OBJECT);
4859
4860 // v0 = address of new object(s) (tagged)
4861 // a2 = argument count (tagged)
yangguo@chromium.org46839fb2012-08-28 09:06:19 +00004862 // Get the arguments boilerplate from the current native context into t0.
lrn@chromium.orgac2828d2011-06-23 06:29:21 +00004863 const int kNormalOffset =
4864 Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX);
4865 const int kAliasedOffset =
4866 Context::SlotOffset(Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX);
4867
yangguo@chromium.org46839fb2012-08-28 09:06:19 +00004868 __ lw(t0, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
4869 __ lw(t0, FieldMemOperand(t0, GlobalObject::kNativeContextOffset));
lrn@chromium.orgac2828d2011-06-23 06:29:21 +00004870 Label skip2_ne, skip2_eq;
4871 __ Branch(&skip2_ne, ne, a1, Operand(zero_reg));
4872 __ lw(t0, MemOperand(t0, kNormalOffset));
4873 __ bind(&skip2_ne);
4874
4875 __ Branch(&skip2_eq, eq, a1, Operand(zero_reg));
4876 __ lw(t0, MemOperand(t0, kAliasedOffset));
4877 __ bind(&skip2_eq);
4878
4879 // v0 = address of new object (tagged)
4880 // a1 = mapped parameter count (tagged)
4881 // a2 = argument count (tagged)
4882 // t0 = address of boilerplate object (tagged)
4883 // Copy the JS object part.
4884 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
4885 __ lw(a3, FieldMemOperand(t0, i));
4886 __ sw(a3, FieldMemOperand(v0, i));
4887 }
4888
erik.corry@gmail.comf2038fb2012-01-16 11:42:08 +00004889 // Set up the callee in-object property.
lrn@chromium.orgac2828d2011-06-23 06:29:21 +00004890 STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
4891 __ lw(a3, MemOperand(sp, 2 * kPointerSize));
4892 const int kCalleeOffset = JSObject::kHeaderSize +
4893 Heap::kArgumentsCalleeIndex * kPointerSize;
4894 __ sw(a3, FieldMemOperand(v0, kCalleeOffset));
4895
4896 // Use the length (smi tagged) and set that as an in-object property too.
4897 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
4898 const int kLengthOffset = JSObject::kHeaderSize +
4899 Heap::kArgumentsLengthIndex * kPointerSize;
4900 __ sw(a2, FieldMemOperand(v0, kLengthOffset));
4901
erik.corry@gmail.comf2038fb2012-01-16 11:42:08 +00004902 // Set up the elements pointer in the allocated arguments object.
lrn@chromium.orgac2828d2011-06-23 06:29:21 +00004903 // If we allocated a parameter map, t0 will point there, otherwise
4904 // it will point to the backing store.
4905 __ Addu(t0, v0, Operand(Heap::kArgumentsObjectSize));
4906 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
4907
4908 // v0 = address of new object (tagged)
4909 // a1 = mapped parameter count (tagged)
4910 // a2 = argument count (tagged)
4911 // t0 = address of parameter map or backing store (tagged)
4912 // Initialize parameter map. If there are no mapped arguments, we're done.
4913 Label skip_parameter_map;
4914 Label skip3;
4915 __ Branch(&skip3, ne, a1, Operand(Smi::FromInt(0)));
4916 // Move backing store address to a3, because it is
4917 // expected there when filling in the unmapped arguments.
4918 __ mov(a3, t0);
4919 __ bind(&skip3);
4920
4921 __ Branch(&skip_parameter_map, eq, a1, Operand(Smi::FromInt(0)));
4922
4923 __ LoadRoot(t2, Heap::kNonStrictArgumentsElementsMapRootIndex);
4924 __ sw(t2, FieldMemOperand(t0, FixedArray::kMapOffset));
4925 __ Addu(t2, a1, Operand(Smi::FromInt(2)));
4926 __ sw(t2, FieldMemOperand(t0, FixedArray::kLengthOffset));
4927 __ sw(cp, FieldMemOperand(t0, FixedArray::kHeaderSize + 0 * kPointerSize));
4928 __ sll(t6, a1, 1);
4929 __ Addu(t2, t0, Operand(t6));
4930 __ Addu(t2, t2, Operand(kParameterMapHeaderSize));
4931 __ sw(t2, FieldMemOperand(t0, FixedArray::kHeaderSize + 1 * kPointerSize));
4932
4933 // Copy the parameter slots and the holes in the arguments.
4934 // We need to fill in mapped_parameter_count slots. They index the context,
4935 // where parameters are stored in reverse order, at
4936 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
 4937 // The mapped parameters thus need to get indices
4938 // MIN_CONTEXT_SLOTS+parameter_count-1 ..
4939 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
4940 // We loop from right to left.
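  // Worked example (illustrative): with parameter_count == 3 and a mapped
  // count of 2, parameter 0 gets context index MIN_CONTEXT_SLOTS + 2 and
  // parameter 1 gets MIN_CONTEXT_SLOTS + 1, while their backing-store slots
  // receive the hole.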
4941 Label parameters_loop, parameters_test;
4942 __ mov(t2, a1);
4943 __ lw(t5, MemOperand(sp, 0 * kPointerSize));
4944 __ Addu(t5, t5, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
4945 __ Subu(t5, t5, Operand(a1));
4946 __ LoadRoot(t3, Heap::kTheHoleValueRootIndex);
4947 __ sll(t6, t2, 1);
4948 __ Addu(a3, t0, Operand(t6));
4949 __ Addu(a3, a3, Operand(kParameterMapHeaderSize));
4950
4951 // t2 = loop variable (tagged)
4952 // a1 = mapping index (tagged)
4953 // a3 = address of backing store (tagged)
4954 // t0 = address of parameter map (tagged)
4955 // t1 = temporary scratch (a.o., for address calculation)
4956 // t3 = the hole value
4957 __ jmp(&parameters_test);
4958
4959 __ bind(&parameters_loop);
4960 __ Subu(t2, t2, Operand(Smi::FromInt(1)));
4961 __ sll(t1, t2, 1);
4962 __ Addu(t1, t1, Operand(kParameterMapHeaderSize - kHeapObjectTag));
4963 __ Addu(t6, t0, t1);
4964 __ sw(t5, MemOperand(t6));
4965 __ Subu(t1, t1, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize));
4966 __ Addu(t6, a3, t1);
4967 __ sw(t3, MemOperand(t6));
4968 __ Addu(t5, t5, Operand(Smi::FromInt(1)));
4969 __ bind(&parameters_test);
4970 __ Branch(&parameters_loop, ne, t2, Operand(Smi::FromInt(0)));
4971
4972 __ bind(&skip_parameter_map);
4973 // a2 = argument count (tagged)
4974 // a3 = address of backing store (tagged)
4975 // t1 = scratch
4976 // Copy arguments header and remaining slots (if there are any).
4977 __ LoadRoot(t1, Heap::kFixedArrayMapRootIndex);
4978 __ sw(t1, FieldMemOperand(a3, FixedArray::kMapOffset));
4979 __ sw(a2, FieldMemOperand(a3, FixedArray::kLengthOffset));
4980
4981 Label arguments_loop, arguments_test;
4982 __ mov(t5, a1);
4983 __ lw(t0, MemOperand(sp, 1 * kPointerSize));
4984 __ sll(t6, t5, 1);
4985 __ Subu(t0, t0, Operand(t6));
4986 __ jmp(&arguments_test);
4987
4988 __ bind(&arguments_loop);
4989 __ Subu(t0, t0, Operand(kPointerSize));
4990 __ lw(t2, MemOperand(t0, 0));
4991 __ sll(t6, t5, 1);
4992 __ Addu(t1, a3, Operand(t6));
4993 __ sw(t2, FieldMemOperand(t1, FixedArray::kHeaderSize));
4994 __ Addu(t5, t5, Operand(Smi::FromInt(1)));
4995
4996 __ bind(&arguments_test);
4997 __ Branch(&arguments_loop, lt, t5, Operand(a2));
4998
4999 // Return and remove the on-stack parameters.
ulan@chromium.org6ff65142012-03-21 09:52:17 +00005000 __ DropAndRet(3);
lrn@chromium.orgac2828d2011-06-23 06:29:21 +00005001
5002 // Do the runtime call to allocate the arguments object.
erik.corry@gmail.comf2038fb2012-01-16 11:42:08 +00005003 // a2 = argument count (tagged)
lrn@chromium.orgac2828d2011-06-23 06:29:21 +00005004 __ bind(&runtime);
5005 __ sw(a2, MemOperand(sp, 0 * kPointerSize)); // Patch argument count.
5006 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
5007}
5008
5009
5010void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
5011 // sp[0] : number of parameters
5012 // sp[4] : receiver displacement
5013 // sp[8] : function
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005014 // Check if the calling frame is an arguments adaptor frame.
5015 Label adaptor_frame, try_allocate, runtime;
5016 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
5017 __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
5018 __ Branch(&adaptor_frame,
5019 eq,
5020 a3,
5021 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
5022
5023 // Get the length from the frame.
5024 __ lw(a1, MemOperand(sp, 0));
5025 __ Branch(&try_allocate);
5026
5027 // Patch the arguments.length and the parameters pointer.
5028 __ bind(&adaptor_frame);
5029 __ lw(a1, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
5030 __ sw(a1, MemOperand(sp, 0));
5031 __ sll(at, a1, kPointerSizeLog2 - kSmiTagSize);
5032 __ Addu(a3, a2, Operand(at));
5033
5034 __ Addu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset));
5035 __ sw(a3, MemOperand(sp, 1 * kPointerSize));
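  // a3 now points at the receiver slot of the adaptor frame (caller fp +
  // kCallerSPOffset + argc * kPointerSize); the copy loop below
  // pre-decrements past the receiver before reading each argument.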
5036
5037 // Try the new space allocation. Start out with computing the size
5038 // of the arguments object and the elements array in words.
5039 Label add_arguments_object;
5040 __ bind(&try_allocate);
5041 __ Branch(&add_arguments_object, eq, a1, Operand(zero_reg));
5042 __ srl(a1, a1, kSmiTagSize);
5043
5044 __ Addu(a1, a1, Operand(FixedArray::kHeaderSize / kPointerSize));
5045 __ bind(&add_arguments_object);
lrn@chromium.orgac2828d2011-06-23 06:29:21 +00005046 __ Addu(a1, a1, Operand(Heap::kArgumentsObjectSizeStrict / kPointerSize));
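  // a1 now holds the allocation size in words: the strict arguments object
  // plus, for a non-zero length, the FixedArray header and one word per
  // element.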
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005047
5048 // Do the allocation of both objects in one go.
lrn@chromium.orgac2828d2011-06-23 06:29:21 +00005049 __ AllocateInNewSpace(a1,
5050 v0,
5051 a2,
5052 a3,
5053 &runtime,
5054 static_cast<AllocationFlags>(TAG_OBJECT |
5055 SIZE_IN_WORDS));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005056
yangguo@chromium.org46839fb2012-08-28 09:06:19 +00005057 // Get the arguments boilerplate from the current native context.
5058 __ lw(t0, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
5059 __ lw(t0, FieldMemOperand(t0, GlobalObject::kNativeContextOffset));
lrn@chromium.orgac2828d2011-06-23 06:29:21 +00005060 __ lw(t0, MemOperand(t0, Context::SlotOffset(
5061 Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX)));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005062
5063 // Copy the JS object part.
5064 __ CopyFields(v0, t0, a3.bit(), JSObject::kHeaderSize / kPointerSize);
5065
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005066 // Get the length (smi tagged) and set that as an in-object property too.
5067 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
5068 __ lw(a1, MemOperand(sp, 0 * kPointerSize));
5069 __ sw(a1, FieldMemOperand(v0, JSObject::kHeaderSize +
lrn@chromium.orgac2828d2011-06-23 06:29:21 +00005070 Heap::kArgumentsLengthIndex * kPointerSize));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005071
5072 Label done;
5073 __ Branch(&done, eq, a1, Operand(zero_reg));
5074
5075 // Get the parameters pointer from the stack.
5076 __ lw(a2, MemOperand(sp, 1 * kPointerSize));
5077
erik.corry@gmail.comf2038fb2012-01-16 11:42:08 +00005078 // Set up the elements pointer in the allocated arguments object and
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005079 // initialize the header in the elements fixed array.
lrn@chromium.orgac2828d2011-06-23 06:29:21 +00005080 __ Addu(t0, v0, Operand(Heap::kArgumentsObjectSizeStrict));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005081 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
5082 __ LoadRoot(a3, Heap::kFixedArrayMapRootIndex);
5083 __ sw(a3, FieldMemOperand(t0, FixedArray::kMapOffset));
5084 __ sw(a1, FieldMemOperand(t0, FixedArray::kLengthOffset));
lrn@chromium.orgac2828d2011-06-23 06:29:21 +00005085 // Untag the length for the loop.
5086 __ srl(a1, a1, kSmiTagSize);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005087
5088 // Copy the fixed array slots.
5089 Label loop;
erik.corry@gmail.comf2038fb2012-01-16 11:42:08 +00005090 // Set up t0 to point to the first array slot.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005091 __ Addu(t0, t0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
5092 __ bind(&loop);
5093 // Pre-decrement a2 with kPointerSize on each iteration.
5094 // Pre-decrement in order to skip receiver.
5095 __ Addu(a2, a2, Operand(-kPointerSize));
5096 __ lw(a3, MemOperand(a2));
5097 // Post-increment t0 with kPointerSize on each iteration.
5098 __ sw(a3, MemOperand(t0));
5099 __ Addu(t0, t0, Operand(kPointerSize));
5100 __ Subu(a1, a1, Operand(1));
5101 __ Branch(&loop, ne, a1, Operand(zero_reg));
5102
5103 // Return and remove the on-stack parameters.
5104 __ bind(&done);
ulan@chromium.org6ff65142012-03-21 09:52:17 +00005105 __ DropAndRet(3);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005106
5107 // Do the runtime call to allocate the arguments object.
5108 __ bind(&runtime);
lrn@chromium.orgac2828d2011-06-23 06:29:21 +00005109 __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
lrn@chromium.org7516f052011-03-30 08:52:27 +00005110}
5111
5112
5113void RegExpExecStub::Generate(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005114 // Just jump directly to runtime if native RegExp is not selected at compile
5115 // time, or if the regexp entry in generated code is turned off by a runtime
5116 // switch or at compilation.
5117#ifdef V8_INTERPRETED_REGEXP
5118 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
5119#else // V8_INTERPRETED_REGEXP
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005120
5121 // Stack frame on entry.
5122 // sp[0]: last_match_info (expected JSArray)
5123 // sp[4]: previous index
5124 // sp[8]: subject string
5125 // sp[12]: JSRegExp object
5126
fschneider@chromium.org7d10be52012-04-10 12:30:14 +00005127 const int kLastMatchInfoOffset = 0 * kPointerSize;
5128 const int kPreviousIndexOffset = 1 * kPointerSize;
5129 const int kSubjectOffset = 2 * kPointerSize;
5130 const int kJSRegExpOffset = 3 * kPointerSize;
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005131
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00005132 Isolate* isolate = masm->isolate();
5133
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005134 Label runtime, invoke_regexp;
5135
5136 // Allocation of registers for this function. These are in callee save
5137 // registers and will be preserved by the call to the native RegExp code, as
5138 // this code is called using the normal C calling convention. When calling
5139 // directly from generated code the native RegExp code will not do a GC and
5140 // therefore the contents of these registers are safe to use after the call.
5141 // MIPS - using s0..s2, since we are not using CEntry Stub.
5142 Register subject = s0;
5143 Register regexp_data = s1;
5144 Register last_match_info_elements = s2;
5145
5146 // Ensure that a RegExp stack is allocated.
5147 ExternalReference address_of_regexp_stack_memory_address =
5148 ExternalReference::address_of_regexp_stack_memory_address(
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00005149 isolate);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005150 ExternalReference address_of_regexp_stack_memory_size =
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00005151 ExternalReference::address_of_regexp_stack_memory_size(isolate);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005152 __ li(a0, Operand(address_of_regexp_stack_memory_size));
5153 __ lw(a0, MemOperand(a0, 0));
5154 __ Branch(&runtime, eq, a0, Operand(zero_reg));
5155
5156 // Check that the first argument is a JSRegExp object.
5157 __ lw(a0, MemOperand(sp, kJSRegExpOffset));
5158 STATIC_ASSERT(kSmiTag == 0);
5159 __ JumpIfSmi(a0, &runtime);
5160 __ GetObjectType(a0, a1, a1);
5161 __ Branch(&runtime, ne, a1, Operand(JS_REGEXP_TYPE));
5162
5163 // Check that the RegExp has been compiled (data contains a fixed array).
5164 __ lw(regexp_data, FieldMemOperand(a0, JSRegExp::kDataOffset));
5165 if (FLAG_debug_code) {
5166 __ And(t0, regexp_data, Operand(kSmiTagMask));
5167 __ Check(nz,
5168 "Unexpected type for RegExp data, FixedArray expected",
5169 t0,
5170 Operand(zero_reg));
5171 __ GetObjectType(regexp_data, a0, a0);
5172 __ Check(eq,
5173 "Unexpected type for RegExp data, FixedArray expected",
5174 a0,
5175 Operand(FIXED_ARRAY_TYPE));
5176 }
5177
5178 // regexp_data: RegExp data (FixedArray)
5179 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
5180 __ lw(a0, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset));
5181 __ Branch(&runtime, ne, a0, Operand(Smi::FromInt(JSRegExp::IRREGEXP)));
5182
5183 // regexp_data: RegExp data (FixedArray)
5184 // Check that the number of captures fit in the static offsets vector buffer.
5185 __ lw(a2,
5186 FieldMemOperand(regexp_data, JSRegExp::kIrregexpCaptureCountOffset));
5187 // Calculate number of capture registers (number_of_captures + 1) * 2. This
5188 // uses the assumption that smis are 2 * their untagged value.
5189 STATIC_ASSERT(kSmiTag == 0);
5190 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
5191 __ Addu(a2, a2, Operand(2)); // a2 was a smi.
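  // For example, 3 captures are represented as the smi 6, and 6 + 2 == 8 ==
  // (3 + 1) * 2 capture registers.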
5192 // Check that the static offsets vector buffer is large enough.
yangguo@chromium.org355cfd12012-08-29 15:32:24 +00005193 __ Branch(
5194 &runtime, hi, a2, Operand(Isolate::kJSRegexpStaticOffsetsVectorSize));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005195
5196 // a2: Number of capture registers
5197 // regexp_data: RegExp data (FixedArray)
5198 // Check that the second argument is a string.
5199 __ lw(subject, MemOperand(sp, kSubjectOffset));
5200 __ JumpIfSmi(subject, &runtime);
5201 __ GetObjectType(subject, a0, a0);
5202 __ And(a0, a0, Operand(kIsNotStringMask));
5203 STATIC_ASSERT(kStringTag == 0);
5204 __ Branch(&runtime, ne, a0, Operand(zero_reg));
5205
5206 // Get the length of the string to r3.
5207 __ lw(a3, FieldMemOperand(subject, String::kLengthOffset));
5208
5209 // a2: Number of capture registers
5210 // a3: Length of subject string as a smi
5211 // subject: Subject string
5212 // regexp_data: RegExp data (FixedArray)
5213 // Check that the third argument is a positive smi less than the subject
5214 // string length. A negative value will be greater (unsigned comparison).
5215 __ lw(a0, MemOperand(sp, kPreviousIndexOffset));
jkummerow@chromium.orgc3b37122011-11-07 10:14:12 +00005216 __ JumpIfNotSmi(a0, &runtime);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005217 __ Branch(&runtime, ls, a3, Operand(a0));
5218
5219 // a2: Number of capture registers
5220 // subject: Subject string
5221 // regexp_data: RegExp data (FixedArray)
5222 // Check that the fourth argument is a JSArray object.
5223 __ lw(a0, MemOperand(sp, kLastMatchInfoOffset));
5224 __ JumpIfSmi(a0, &runtime);
5225 __ GetObjectType(a0, a1, a1);
5226 __ Branch(&runtime, ne, a1, Operand(JS_ARRAY_TYPE));
5227 // Check that the JSArray is in fast case.
5228 __ lw(last_match_info_elements,
5229 FieldMemOperand(a0, JSArray::kElementsOffset));
5230 __ lw(a0, FieldMemOperand(last_match_info_elements, HeapObject::kMapOffset));
5231 __ Branch(&runtime, ne, a0, Operand(
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00005232 isolate->factory()->fixed_array_map()));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005233 // Check that the last match info has space for the capture registers and the
5234 // additional information.
5235 __ lw(a0,
5236 FieldMemOperand(last_match_info_elements, FixedArray::kLengthOffset));
5237 __ Addu(a2, a2, Operand(RegExpImpl::kLastMatchOverhead));
5238 __ sra(at, a0, kSmiTagSize); // Untag length for comparison.
5239 __ Branch(&runtime, gt, a2, Operand(at));
ricow@chromium.org4668a2c2011-08-29 10:41:00 +00005240
5241 // Reset offset for possibly sliced string.
5242 __ mov(t0, zero_reg);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005243 // subject: Subject string
5244 // regexp_data: RegExp data (FixedArray)
5245 // Check the representation and encoding of the subject string.
5246 Label seq_string;
5247 __ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset));
5248 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset));
svenpanne@chromium.orgecb9dd62011-12-01 08:22:35 +00005249 // First check for flat string. None of the following string type tests will
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00005250 // succeed if subject is not a string or a short external string.
5251 __ And(a1,
5252 a0,
5253 Operand(kIsNotStringMask |
5254 kStringRepresentationMask |
5255 kShortExternalStringMask));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005256 STATIC_ASSERT((kStringTag | kSeqStringTag) == 0);
yangguo@chromium.org80c42ed2011-08-31 09:03:56 +00005257 __ Branch(&seq_string, eq, a1, Operand(zero_reg));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005258
5259 // subject: Subject string
5260 // a0: instance type of Subject string
5261 // regexp_data: RegExp data (FixedArray)
svenpanne@chromium.orgecb9dd62011-12-01 08:22:35 +00005262 // a1: whether subject is a string and, if so, its string representation
ricow@chromium.org4668a2c2011-08-29 10:41:00 +00005263 // Check for flat cons string or sliced string.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005264 // A flat cons string is a cons string where the second part is the empty
5265 // string. In that case the subject string is just the first part of the cons
5266 // string. Also in this case the first part of the cons string is known to be
5267 // a sequential string or an external string.
ricow@chromium.org4668a2c2011-08-29 10:41:00 +00005268 // In the case of a sliced string its offset has to be taken into account.
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00005269 Label cons_string, external_string, check_encoding;
yangguo@chromium.org80c42ed2011-08-31 09:03:56 +00005270 STATIC_ASSERT(kConsStringTag < kExternalStringTag);
5271 STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
svenpanne@chromium.orgecb9dd62011-12-01 08:22:35 +00005272 STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00005273 STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
yangguo@chromium.org80c42ed2011-08-31 09:03:56 +00005274 __ Branch(&cons_string, lt, a1, Operand(kExternalStringTag));
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00005275 __ Branch(&external_string, eq, a1, Operand(kExternalStringTag));
ricow@chromium.org4668a2c2011-08-29 10:41:00 +00005276
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00005277 // Catch non-string subject or short external string.
5278 STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
5279 __ And(at, a1, Operand(kIsNotStringMask | kShortExternalStringMask));
svenpanne@chromium.orgecb9dd62011-12-01 08:22:35 +00005280 __ Branch(&runtime, ne, at, Operand(zero_reg));
5281
ricow@chromium.org4668a2c2011-08-29 10:41:00 +00005282 // String is sliced.
5283 __ lw(t0, FieldMemOperand(subject, SlicedString::kOffsetOffset));
5284 __ sra(t0, t0, kSmiTagSize);
5285 __ lw(subject, FieldMemOperand(subject, SlicedString::kParentOffset));
5286 // t0: offset of sliced string, untagged.
5287 __ jmp(&check_encoding);
5288 // String is a cons string, check whether it is flat.
5289 __ bind(&cons_string);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005290 __ lw(a0, FieldMemOperand(subject, ConsString::kSecondOffset));
5291 __ LoadRoot(a1, Heap::kEmptyStringRootIndex);
5292 __ Branch(&runtime, ne, a0, Operand(a1));
5293 __ lw(subject, FieldMemOperand(subject, ConsString::kFirstOffset));
ricow@chromium.org4668a2c2011-08-29 10:41:00 +00005294 // Is first part of cons or parent of slice a flat string?
5295 __ bind(&check_encoding);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005296 __ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset));
5297 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005298 STATIC_ASSERT(kSeqStringTag == 0);
5299 __ And(at, a0, Operand(kStringRepresentationMask));
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00005300 __ Branch(&external_string, ne, at, Operand(zero_reg));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005301
5302 __ bind(&seq_string);
5303 // subject: Subject string
5304 // regexp_data: RegExp data (FixedArray)
5305 // a0: Instance type of subject string
5306 STATIC_ASSERT(kStringEncodingMask == 4);
mvstanton@chromium.orge4ac3ef2012-11-12 14:53:34 +00005307 STATIC_ASSERT(kOneByteStringTag == 4);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005308 STATIC_ASSERT(kTwoByteStringTag == 0);
5309 // Find the code object based on the assumptions above.
ulan@chromium.org2efb9002012-01-19 15:36:35 +00005310 __ And(a0, a0, Operand(kStringEncodingMask)); // Non-zero for ASCII.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005311 __ lw(t9, FieldMemOperand(regexp_data, JSRegExp::kDataAsciiCodeOffset));
ulan@chromium.org2efb9002012-01-19 15:36:35 +00005312 __ sra(a3, a0, 2); // a3 is 1 for ASCII, 0 for UC16 (used below).
ricow@chromium.org4668a2c2011-08-29 10:41:00 +00005313 __ lw(t1, FieldMemOperand(regexp_data, JSRegExp::kDataUC16CodeOffset));
mstarzinger@chromium.org3233d2f2012-03-14 11:16:03 +00005314 __ Movz(t9, t1, a0); // If UC16 (a0 is 0), replace t9 w/kDataUC16CodeOffset.
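  // After the And above, a0 is 4 for one-byte (ASCII) strings and 0 for
  // two-byte strings; the shift turns this into the 0/1 flag in a3, and the
  // Movz selects the UC16 code object exactly when a0 is 0.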
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005315
5316 // Check that the irregexp code has been generated for the actual string
sgjesse@chromium.org6db88712011-07-11 11:41:22 +00005317 // encoding. If it has, the field contains a code object; otherwise it contains
5318 // a smi (code flushing support).
5319 __ JumpIfSmi(t9, &runtime);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005320
5321 // a3: encoding of subject string (1 if ASCII, 0 if two_byte);
5322 // t9: code
5323 // subject: Subject string
5324 // regexp_data: RegExp data (FixedArray)
5325 // Load used arguments before starting to push arguments for call to native
5326 // RegExp code to avoid handling changing stack height.
5327 __ lw(a1, MemOperand(sp, kPreviousIndexOffset));
5328 __ sra(a1, a1, kSmiTagSize); // Untag the Smi.
5329
5330 // a1: previous index
5331 // a3: encoding of subject string (1 if ASCII, 0 if two_byte);
5332 // t9: code
5333 // subject: Subject string
5334 // regexp_data: RegExp data (FixedArray)
5335 // All checks done. Now push arguments for native regexp code.
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00005336 __ IncrementCounter(isolate->counters()->regexp_entry_native(),
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005337 1, a0, a2);
5338
5339 // Isolates: note we add an additional parameter here (isolate pointer).
jkummerow@chromium.org777db6f2012-05-24 09:33:09 +00005340 const int kRegExpExecuteArguments = 9;
fschneider@chromium.org7d10be52012-04-10 12:30:14 +00005341 const int kParameterRegisters = 4;
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005342 __ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters);
5343
5344 // Stack pointer now points to cell where return address is to be written.
5345 // Arguments are before that on the stack or in registers, meaning we
5346 // treat the return address as argument 5. Thus every argument after that
5347 // needs to be shifted back by 1. Since DirectCEntryStub will handle
5348 // allocating space for the c argument slots, we don't need to calculate
5349 // that into the argument positions on the stack. This is how the stack will
5350 // look (sp meaning the value of sp at this moment):
jkummerow@chromium.org777db6f2012-05-24 09:33:09 +00005351 // [sp + 5] - Argument 9
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005352 // [sp + 4] - Argument 8
5353 // [sp + 3] - Argument 7
5354 // [sp + 2] - Argument 6
5355 // [sp + 1] - Argument 5
5356 // [sp + 0] - saved ra
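  // (On MIPS o32 the first four C arguments are passed in a0..a3, which is
  // why only arguments 5..9 need stack slots here.)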
5357
jkummerow@chromium.org777db6f2012-05-24 09:33:09 +00005358 // Argument 9: Pass current isolate address.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005359 // CFunctionArgumentOperand handles MIPS stack argument slots.
5360 __ li(a0, Operand(ExternalReference::isolate_address()));
jkummerow@chromium.org777db6f2012-05-24 09:33:09 +00005361 __ sw(a0, MemOperand(sp, 5 * kPointerSize));
5362
5363 // Argument 8: Indicate that this is a direct call from JavaScript.
5364 __ li(a0, Operand(1));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005365 __ sw(a0, MemOperand(sp, 4 * kPointerSize));
5366
jkummerow@chromium.org777db6f2012-05-24 09:33:09 +00005367 // Argument 7: Start (high end) of backtracking stack memory area.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005368 __ li(a0, Operand(address_of_regexp_stack_memory_address));
5369 __ lw(a0, MemOperand(a0, 0));
5370 __ li(a2, Operand(address_of_regexp_stack_memory_size));
5371 __ lw(a2, MemOperand(a2, 0));
5372 __ addu(a0, a0, a2);
jkummerow@chromium.org777db6f2012-05-24 09:33:09 +00005373 __ sw(a0, MemOperand(sp, 3 * kPointerSize));
5374
5375 // Argument 6: Set the number of capture registers to zero to force global
5376 // regexps to behave as non-global. This does not affect non-global regexps.
5377 __ mov(a0, zero_reg);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005378 __ sw(a0, MemOperand(sp, 2 * kPointerSize));
5379
5380 // Argument 5: static offsets vector buffer.
5381 __ li(a0, Operand(
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00005382 ExternalReference::address_of_static_offsets_vector(isolate)));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005383 __ sw(a0, MemOperand(sp, 1 * kPointerSize));
5384
5385 // For arguments 4 and 3 get string length, calculate start of string data
5386 // and calculate the shift of the index (0 for ASCII and 1 for two byte).
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00005387 __ Addu(t2, subject, Operand(SeqString::kHeaderSize - kHeapObjectTag));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005388 __ Xor(a3, a3, Operand(1)); // 1 for 2-byte str, 0 for 1-byte.
ricow@chromium.org4668a2c2011-08-29 10:41:00 +00005389 // Load the length from the original subject string from the previous stack
5390 // frame. Therefore we have to use fp, which points exactly to two pointer
5391 // sizes below the previous sp. (Because creating a new stack frame pushes
5392 // the previous fp onto the stack and moves up sp by 2 * kPointerSize.)
jkummerow@chromium.org486075a2011-09-07 12:44:28 +00005393 __ lw(subject, MemOperand(fp, kSubjectOffset + 2 * kPointerSize));
ricow@chromium.org4668a2c2011-08-29 10:41:00 +00005394 // If slice offset is not 0, load the length from the original sliced string.
5395 // Argument 4, a3: End of string data
5396 // Argument 3, a2: Start of string data
5397 // Prepare start and end index of the input.
5398 __ sllv(t1, t0, a3);
5399 __ addu(t0, t2, t1);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005400 __ sllv(t1, a1, a3);
5401 __ addu(a2, t0, t1);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005402
jkummerow@chromium.org486075a2011-09-07 12:44:28 +00005403 __ lw(t2, FieldMemOperand(subject, String::kLengthOffset));
ricow@chromium.org4668a2c2011-08-29 10:41:00 +00005404 __ sra(t2, t2, kSmiTagSize);
5405 __ sllv(t1, t2, a3);
5406 __ addu(a3, t0, t1);
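  // In effect: a2 = data_start + (previous_index << char_size_shift) and
  // a3 = data_start + (subject_length << char_size_shift), where data_start
  // already includes any slice offset and char_size_shift is 0 for one-byte,
  // 1 for two-byte strings.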
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005407 // Argument 2 (a1): Previous index.
5408 // Already there
5409
5410 // Argument 1 (a0): Subject string.
jkummerow@chromium.org486075a2011-09-07 12:44:28 +00005411 __ mov(a0, subject);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005412
5413 // Locate the code entry and call it.
5414 __ Addu(t9, t9, Operand(Code::kHeaderSize - kHeapObjectTag));
5415 DirectCEntryStub stub;
5416 stub.GenerateCall(masm, t9);
5417
5418 __ LeaveExitFrame(false, no_reg);
5419
5420 // v0: result
5421 // subject: subject string (callee saved)
5422 // regexp_data: RegExp data (callee saved)
5423 // last_match_info_elements: Last match info elements (callee saved)
5424
5425 // Check the result.
5426
5427 Label success;
jkummerow@chromium.org777db6f2012-05-24 09:33:09 +00005428 __ Branch(&success, eq, v0, Operand(1));
5429 // We expect exactly one result since we force the called regexp to behave
5430 // as non-global.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005431 Label failure;
danno@chromium.org88aa0582012-03-23 15:11:57 +00005432 __ Branch(&failure, eq, v0, Operand(NativeRegExpMacroAssembler::FAILURE));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005433 // If not exception it can only be retry. Handle that in the runtime system.
danno@chromium.org88aa0582012-03-23 15:11:57 +00005434 __ Branch(&runtime, ne, v0, Operand(NativeRegExpMacroAssembler::EXCEPTION));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005435 // Result must now be exception. If there is no pending exception already, a
5436 // stack overflow (on the backtrack stack) was detected in RegExp code but
5437 // the exception has not been created yet. Handle that in the runtime system.
5438 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00005439 __ li(a1, Operand(isolate->factory()->the_hole_value()));
kmillikin@chromium.org83e16822011-09-13 08:21:47 +00005440 __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00005441 isolate)));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005442 __ lw(v0, MemOperand(a2, 0));
jkummerow@chromium.org486075a2011-09-07 12:44:28 +00005443 __ Branch(&runtime, eq, v0, Operand(a1));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005444
5445 __ sw(a1, MemOperand(a2, 0)); // Clear pending exception.
5446
5447 // Check if the exception is a termination. If so, throw as uncatchable.
5448 __ LoadRoot(a0, Heap::kTerminationExceptionRootIndex);
5449 Label termination_exception;
jkummerow@chromium.org486075a2011-09-07 12:44:28 +00005450 __ Branch(&termination_exception, eq, v0, Operand(a0));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005451
ulan@chromium.org65a89c22012-02-14 11:46:07 +00005452 __ Throw(v0);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005453
5454 __ bind(&termination_exception);
ulan@chromium.org65a89c22012-02-14 11:46:07 +00005455 __ ThrowUncatchable(v0);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005456
5457 __ bind(&failure);
5458 // For failure and exception return null.
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00005459 __ li(v0, Operand(isolate->factory()->null_value()));
ulan@chromium.org6ff65142012-03-21 09:52:17 +00005460 __ DropAndRet(4);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005461
5462 // Process the result from the native regexp code.
5463 __ bind(&success);
5464 __ lw(a1,
5465 FieldMemOperand(regexp_data, JSRegExp::kIrregexpCaptureCountOffset));
5466 // Calculate number of capture registers (number_of_captures + 1) * 2.
5467 STATIC_ASSERT(kSmiTag == 0);
5468 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
5469 __ Addu(a1, a1, Operand(2)); // a1 was a smi.
5470
5471 // a1: number of capture registers
5472 // subject: subject string
5473 // Store the capture count.
5474 __ sll(a2, a1, kSmiTagSize + kSmiShiftSize); // To smi.
5475 __ sw(a2, FieldMemOperand(last_match_info_elements,
5476 RegExpImpl::kLastCaptureCountOffset));
5477 // Store last subject and last input.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005478 __ sw(subject,
5479 FieldMemOperand(last_match_info_elements,
5480 RegExpImpl::kLastSubjectOffset));
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00005481 __ mov(a2, subject);
5482 __ RecordWriteField(last_match_info_elements,
5483 RegExpImpl::kLastSubjectOffset,
5484 a2,
5485 t3,
5486 kRAHasNotBeenSaved,
5487 kDontSaveFPRegs);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005488 __ sw(subject,
5489 FieldMemOperand(last_match_info_elements,
5490 RegExpImpl::kLastInputOffset));
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00005491 __ RecordWriteField(last_match_info_elements,
5492 RegExpImpl::kLastInputOffset,
5493 subject,
5494 t3,
5495 kRAHasNotBeenSaved,
5496 kDontSaveFPRegs);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005497
5498 // Get the static offsets vector filled by the native regexp code.
5499 ExternalReference address_of_static_offsets_vector =
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00005500 ExternalReference::address_of_static_offsets_vector(isolate);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005501 __ li(a2, Operand(address_of_static_offsets_vector));
5502
5503 // a1: number of capture registers
5504 // a2: offsets vector
5505 Label next_capture, done;
5506 // Capture register counter starts from number of capture registers and
5507 // counts down until wrapping after zero.
5508 __ Addu(a0,
5509 last_match_info_elements,
5510 Operand(RegExpImpl::kFirstCaptureOffset - kHeapObjectTag));
5511 __ bind(&next_capture);
5512 __ Subu(a1, a1, Operand(1));
5513 __ Branch(&done, lt, a1, Operand(zero_reg));
5514 // Read the value from the static offsets vector buffer.
5515 __ lw(a3, MemOperand(a2, 0));
5516 __ addiu(a2, a2, kPointerSize);
5517 // Store the smi value in the last match info.
5518 __ sll(a3, a3, kSmiTagSize); // Convert to Smi.
5519 __ sw(a3, MemOperand(a0, 0));
5520 __ Branch(&next_capture, USE_DELAY_SLOT);
ulan@chromium.org6ff65142012-03-21 09:52:17 +00005521 __ addiu(a0, a0, kPointerSize); // In branch delay slot.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005522
5523 __ bind(&done);
5524
5525 // Return last match info.
5526 __ lw(v0, MemOperand(sp, kLastMatchInfoOffset));
ulan@chromium.org6ff65142012-03-21 09:52:17 +00005527 __ DropAndRet(4);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005528
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00005529 // External string. Short external strings have already been ruled out.
5530 // a0: scratch
5531 __ bind(&external_string);
5532 __ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset));
5533 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset));
5534 if (FLAG_debug_code) {
5535 // Assert that we do not have a cons or slice (indirect strings) here.
5536 // Sequential strings have already been ruled out.
5537 __ And(at, a0, Operand(kIsIndirectStringMask));
5538 __ Assert(eq,
5539 "external string expected, but not found",
5540 at,
5541 Operand(zero_reg));
5542 }
5543 __ lw(subject,
5544 FieldMemOperand(subject, ExternalString::kResourceDataOffset));
5545 // Move the pointer so that offset-wise, it looks like a sequential string.
yangguo@chromium.orgfb377212012-11-16 14:43:43 +00005546 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00005547 __ Subu(subject,
5548 subject,
5549 SeqTwoByteString::kHeaderSize - kHeapObjectTag);
5550 __ jmp(&seq_string);
5551
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005552 // Do the runtime call to execute the regexp.
5553 __ bind(&runtime);
5554 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
5555#endif // V8_INTERPRETED_REGEXP
lrn@chromium.org7516f052011-03-30 08:52:27 +00005556}
5557
5558
5559void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005560 const int kMaxInlineLength = 100;
5561 Label slowcase;
5562 Label done;
5563 __ lw(a1, MemOperand(sp, kPointerSize * 2));
5564 STATIC_ASSERT(kSmiTag == 0);
5565 STATIC_ASSERT(kSmiTagSize == 1);
5566 __ JumpIfNotSmi(a1, &slowcase);
5567 __ Branch(&slowcase, hi, a1, Operand(Smi::FromInt(kMaxInlineLength)));
5568 // Smi-tagging is equivalent to multiplying by 2.
5569 // Allocate RegExpResult followed by FixedArray with size in ebx.
5570 // JSArray: [Map][empty properties][Elements][Length-smi][index][input]
5571 // Elements: [Map][Length][..elements..]
5572 // Size of JSArray with two in-object properties and the header of a
5573 // FixedArray.
5574 int objects_size =
5575 (JSRegExpResult::kSize + FixedArray::kHeaderSize) / kPointerSize;
5576 __ srl(t1, a1, kSmiTagSize + kSmiShiftSize);
5577 __ Addu(a2, t1, Operand(objects_size));
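  // a2 now holds the allocation size in words: the JSRegExpResult object,
  // the FixedArray header, and one word per element.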
5578 __ AllocateInNewSpace(
5579 a2, // In: Size, in words.
5580 v0, // Out: Start of allocation (tagged).
5581 a3, // Scratch register.
5582 t0, // Scratch register.
5583 &slowcase,
5584 static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));
5585 // v0: Start of allocated area, object-tagged.
5586 // a1: Number of elements in array, as smi.
5587 // t1: Number of elements, untagged.
5588
5589 // Set JSArray map to global.regexp_result_map().
5590 // Set empty properties FixedArray.
5591 // Set elements to point to FixedArray allocated right after the JSArray.
5592 // Interleave operations for better latency.
yangguo@chromium.org46839fb2012-08-28 09:06:19 +00005593 __ lw(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005594 __ Addu(a3, v0, Operand(JSRegExpResult::kSize));
5595 __ li(t0, Operand(masm->isolate()->factory()->empty_fixed_array()));
yangguo@chromium.org46839fb2012-08-28 09:06:19 +00005596 __ lw(a2, FieldMemOperand(a2, GlobalObject::kNativeContextOffset));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005597 __ sw(a3, FieldMemOperand(v0, JSObject::kElementsOffset));
5598 __ lw(a2, ContextOperand(a2, Context::REGEXP_RESULT_MAP_INDEX));
5599 __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
5600 __ sw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
5601
5602 // Set input, index and length fields from arguments.
5603 __ lw(a1, MemOperand(sp, kPointerSize * 0));
danno@chromium.orgfa458e42012-02-01 10:48:36 +00005604 __ lw(a2, MemOperand(sp, kPointerSize * 1));
5605 __ lw(t2, MemOperand(sp, kPointerSize * 2));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005606 __ sw(a1, FieldMemOperand(v0, JSRegExpResult::kInputOffset));
danno@chromium.orgfa458e42012-02-01 10:48:36 +00005607 __ sw(a2, FieldMemOperand(v0, JSRegExpResult::kIndexOffset));
5608 __ sw(t2, FieldMemOperand(v0, JSArray::kLengthOffset));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005609
5610 // Fill out the elements FixedArray.
5611 // v0: JSArray, tagged.
5612 // a3: FixedArray, tagged.
5613 // t1: Number of elements in array, untagged.
5614
5615 // Set map.
5616 __ li(a2, Operand(masm->isolate()->factory()->fixed_array_map()));
5617 __ sw(a2, FieldMemOperand(a3, HeapObject::kMapOffset));
5618 // Set FixedArray length.
5619 __ sll(t2, t1, kSmiTagSize);
5620 __ sw(t2, FieldMemOperand(a3, FixedArray::kLengthOffset));
ulan@chromium.org56c14af2012-09-20 12:51:09 +00005621 // Fill contents of fixed-array with undefined.
5622 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005623 __ Addu(a3, a3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
ulan@chromium.org56c14af2012-09-20 12:51:09 +00005624 // Fill fixed array elements with undefined.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005625 // v0: JSArray, tagged.
ulan@chromium.org56c14af2012-09-20 12:51:09 +00005626 // a2: undefined.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005627 // a3: Start of elements in FixedArray.
5628 // t1: Number of elements to fill.
5629 Label loop;
5630 __ sll(t1, t1, kPointerSizeLog2); // Convert num elements to num bytes.
5631 __ addu(t1, t1, a3); // Point past last element to store.
5632 __ bind(&loop);
5633 __ Branch(&done, ge, a3, Operand(t1)); // Break when a3 past end of elem.
5634 __ sw(a2, MemOperand(a3));
5635 __ Branch(&loop, USE_DELAY_SLOT);
5636 __ addiu(a3, a3, kPointerSize); // In branch delay slot.
5637
5638 __ bind(&done);
ulan@chromium.org6ff65142012-03-21 09:52:17 +00005639 __ DropAndRet(3);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005640
5641 __ bind(&slowcase);
5642 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
lrn@chromium.org7516f052011-03-30 08:52:27 +00005643}
5644
5645
danno@chromium.orgfa458e42012-02-01 10:48:36 +00005646static void GenerateRecordCallTarget(MacroAssembler* masm) {
5647 // Cache the called function in a global property cell. Cache states
5648 // are uninitialized, monomorphic (indicated by a JSFunction), and
5649 // megamorphic.
5650 // a1 : the function to call
5651 // a2 : cache cell for call target
5652 Label done;
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00005653
danno@chromium.orgfa458e42012-02-01 10:48:36 +00005654 ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()),
5655 masm->isolate()->heap()->undefined_value());
5656 ASSERT_EQ(*TypeFeedbackCells::UninitializedSentinel(masm->isolate()),
5657 masm->isolate()->heap()->the_hole_value());
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00005658
danno@chromium.orgfa458e42012-02-01 10:48:36 +00005659 // Load the cache state into a3.
5660 __ lw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00005661
danno@chromium.orgfa458e42012-02-01 10:48:36 +00005662 // A monomorphic cache hit or an already megamorphic state: invoke the
5663 // function without changing the state.
5664 __ Branch(&done, eq, a3, Operand(a1));
5665 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
5666 __ Branch(&done, eq, a3, Operand(at));
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00005667
danno@chromium.orgfa458e42012-02-01 10:48:36 +00005668 // A monomorphic miss (i.e., here the cache is not uninitialized) goes
5669 // megamorphic.
5670 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
ulan@chromium.org812308e2012-02-29 15:58:45 +00005671
5672 __ Branch(USE_DELAY_SLOT, &done, eq, a3, Operand(at));
5673 // An uninitialized cache is patched with the function.
5674 // Store a1 in the delay slot. This may or may not get overwritten depending
5675 // on the result of the comparison.
5676 __ sw(a1, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
5677 // No need for a write barrier here - cells are rescanned.
5678
danno@chromium.orgfa458e42012-02-01 10:48:36 +00005679 // MegamorphicSentinel is an immortal immovable object (undefined) so no
5680 // write-barrier is needed.
5681 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
5682 __ sw(at, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
danno@chromium.orgfa458e42012-02-01 10:48:36 +00005683
5684 __ bind(&done);
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00005685}
5686
5687
lrn@chromium.org7516f052011-03-30 08:52:27 +00005688void CallFunctionStub::Generate(MacroAssembler* masm) {
danno@chromium.orgc612e022011-11-10 11:38:15 +00005689 // a1 : the function to call
danno@chromium.orgfa458e42012-02-01 10:48:36 +00005690 // a2 : cache cell for call target
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00005691 Label slow, non_function;
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005692
danno@chromium.org40cb8782011-05-25 07:58:50 +00005693 // The receiver might implicitly be the global object. This is
5694 // indicated by passing the hole as the receiver to the call
5695 // function stub.
5696 if (ReceiverMightBeImplicit()) {
5697 Label call;
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005698 // Get the receiver from the stack.
5699 // function, receiver [, arguments]
danno@chromium.org40cb8782011-05-25 07:58:50 +00005700 __ lw(t0, MemOperand(sp, argc_ * kPointerSize));
5701 // Call as function is indicated with the hole.
5702 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
5703 __ Branch(&call, ne, t0, Operand(at));
5704 // Patch the receiver on the stack with the global receiver object.
yangguo@chromium.org46839fb2012-08-28 09:06:19 +00005705 __ lw(a3,
5706 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
mstarzinger@chromium.org88d326b2012-04-23 12:57:22 +00005707 __ lw(a3, FieldMemOperand(a3, GlobalObject::kGlobalReceiverOffset));
5708 __ sw(a3, MemOperand(sp, argc_ * kPointerSize));
danno@chromium.org40cb8782011-05-25 07:58:50 +00005709 __ bind(&call);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005710 }
5711
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005712 // Check that the function is really a JavaScript function.
5713 // a1: pushed function (to be verified)
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00005714 __ JumpIfSmi(a1, &non_function);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005715 // Get the map of the function object.
mstarzinger@chromium.org88d326b2012-04-23 12:57:22 +00005716 __ GetObjectType(a1, a3, a3);
5717 __ Branch(&slow, ne, a3, Operand(JS_FUNCTION_TYPE));
5718
5719 if (RecordCallTarget()) {
5720 GenerateRecordCallTarget(masm);
5721 }
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005722
5723 // Fast-case: Invoke the function now.
5724 // a1: pushed function
5725 ParameterCount actual(argc_);
danno@chromium.org40cb8782011-05-25 07:58:50 +00005726
5727 if (ReceiverMightBeImplicit()) {
5728 Label call_as_function;
5729 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
5730 __ Branch(&call_as_function, eq, t0, Operand(at));
erik.corry@gmail.comd6076d92011-06-06 09:39:18 +00005731 __ InvokeFunction(a1,
5732 actual,
5733 JUMP_FUNCTION,
5734 NullCallWrapper(),
5735 CALL_AS_METHOD);
danno@chromium.org40cb8782011-05-25 07:58:50 +00005736 __ bind(&call_as_function);
5737 }
5738 __ InvokeFunction(a1,
5739 actual,
5740 JUMP_FUNCTION,
5741 NullCallWrapper(),
5742 CALL_AS_FUNCTION);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005743
5744 // Slow-case: Non-function called.
5745 __ bind(&slow);
mstarzinger@chromium.org88d326b2012-04-23 12:57:22 +00005746 if (RecordCallTarget()) {
5747 // If there is a call target cache, mark it megamorphic in the
5748 // non-function case. MegamorphicSentinel is an immortal immovable
5749 // object (undefined) so no write barrier is needed.
5750 ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()),
5751 masm->isolate()->heap()->undefined_value());
5752 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
5753 __ sw(at, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
5754 }
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00005755 // Check for function proxy.
mstarzinger@chromium.org88d326b2012-04-23 12:57:22 +00005756 __ Branch(&non_function, ne, a3, Operand(JS_FUNCTION_PROXY_TYPE));
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00005757 __ push(a1); // Put proxy as additional argument.
jkummerow@chromium.org59297c72013-01-09 16:32:23 +00005758 __ li(a0, Operand(argc_ + 1, RelocInfo::NONE32));
5759 __ li(a2, Operand(0, RelocInfo::NONE32));
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00005760 __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY);
danno@chromium.orgc612e022011-11-10 11:38:15 +00005761 __ SetCallKind(t1, CALL_AS_METHOD);
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00005762 {
5763 Handle<Code> adaptor =
5764 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
5765 __ Jump(adaptor, RelocInfo::CODE_TARGET);
5766 }
5767
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005768 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
5769 // of the original receiver from the call site).
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00005770 __ bind(&non_function);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005771 __ sw(a1, MemOperand(sp, argc_ * kPointerSize));
erik.corry@gmail.comf2038fb2012-01-16 11:42:08 +00005772 __ li(a0, Operand(argc_)); // Set up the number of arguments.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005773 __ mov(a2, zero_reg);
5774 __ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION);
rossberg@chromium.org717967f2011-07-20 13:44:42 +00005775 __ SetCallKind(t1, CALL_AS_METHOD);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005776 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
5777 RelocInfo::CODE_TARGET);
lrn@chromium.org7516f052011-03-30 08:52:27 +00005778}
5779
5780
danno@chromium.orgfa458e42012-02-01 10:48:36 +00005781void CallConstructStub::Generate(MacroAssembler* masm) {
5782 // a0 : number of arguments
5783 // a1 : the function to call
5784 // a2 : cache cell for call target
5785 Label slow, non_function_call;
5786
5787 // Check that the function is not a smi.
5788 __ JumpIfSmi(a1, &non_function_call);
5789 // Check that the function is a JSFunction.
5790 __ GetObjectType(a1, a3, a3);
5791 __ Branch(&slow, ne, a3, Operand(JS_FUNCTION_TYPE));
5792
5793 if (RecordCallTarget()) {
5794 GenerateRecordCallTarget(masm);
5795 }
5796
5797 // Jump to the function-specific construct stub.
5798 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
5799 __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kConstructStubOffset));
5800 __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
5801 __ Jump(at);
5802
5803 // a0: number of arguments
5804 // a1: called object
5805 // a3: object type
5806 Label do_call;
5807 __ bind(&slow);
5808 __ Branch(&non_function_call, ne, a3, Operand(JS_FUNCTION_PROXY_TYPE));
5809 __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
5810 __ jmp(&do_call);
5811
5812 __ bind(&non_function_call);
5813 __ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
5814 __ bind(&do_call);
5815 // Set expected number of arguments to zero (not changing a0).
jkummerow@chromium.org59297c72013-01-09 16:32:23 +00005816 __ li(a2, Operand(0, RelocInfo::NONE32));
danno@chromium.orgfa458e42012-02-01 10:48:36 +00005817 __ SetCallKind(t1, CALL_AS_METHOD);
5818 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
5819 RelocInfo::CODE_TARGET);
5820}
5821
5822
karlklose@chromium.org83a47282011-05-11 11:54:09 +00005823// StringCharCodeAtGenerator.
lrn@chromium.org7516f052011-03-30 08:52:27 +00005824void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005825 Label flat_string;
5826 Label ascii_string;
5827 Label got_char_code;
ricow@chromium.org4668a2c2011-08-29 10:41:00 +00005828 Label sliced_string;
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005829
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005830 ASSERT(!t0.is(index_));
5831 ASSERT(!t0.is(result_));
5832 ASSERT(!t0.is(object_));
5833
5834 // If the receiver is a smi trigger the non-string case.
5835 __ JumpIfSmi(object_, receiver_not_string_);
5836
5837 // Fetch the instance type of the receiver into result register.
5838 __ lw(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
5839 __ lbu(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
5840 // If the receiver is not a string trigger the non-string case.
5841 __ And(t0, result_, Operand(kIsNotStringMask));
5842 __ Branch(receiver_not_string_, ne, t0, Operand(zero_reg));
5843
5844 // If the index is non-smi trigger the non-smi case.
5845 __ JumpIfNotSmi(index_, &index_not_smi_);
5846
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005847 __ bind(&got_smi_index_);
5848
5849 // Check for index out of range.
5850 __ lw(t0, FieldMemOperand(object_, String::kLengthOffset));
danno@chromium.orgc612e022011-11-10 11:38:15 +00005851 __ Branch(index_out_of_range_, ls, t0, Operand(index_));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005852
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00005853 __ sra(index_, index_, kSmiTagSize);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005854
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00005855 StringCharLoadGenerator::Generate(masm,
5856 object_,
5857 index_,
5858 result_,
5859 &call_runtime_);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005860
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005861 __ sll(result_, result_, kSmiTagSize);
5862 __ bind(&exit_);
lrn@chromium.org7516f052011-03-30 08:52:27 +00005863}
5864
5865
5866void StringCharCodeAtGenerator::GenerateSlow(
jkummerow@chromium.orgc3b37122011-11-07 10:14:12 +00005867 MacroAssembler* masm,
5868 const RuntimeCallHelper& call_helper) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005869 __ Abort("Unexpected fallthrough to CharCodeAt slow case");
5870
5871 // Index is not a smi.
5872 __ bind(&index_not_smi_);
5873 // If index is a heap number, try converting it to an integer.
5874 __ CheckMap(index_,
danno@chromium.orgc612e022011-11-10 11:38:15 +00005875 result_,
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005876 Heap::kHeapNumberMapRootIndex,
5877 index_not_number_,
danno@chromium.org40cb8782011-05-25 07:58:50 +00005878 DONT_DO_SMI_CHECK);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005879 call_helper.BeforeCall(masm);
5880 // Consumed by runtime conversion function:
danno@chromium.orgc612e022011-11-10 11:38:15 +00005881 __ Push(object_, index_);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005882 if (index_flags_ == STRING_INDEX_IS_NUMBER) {
5883 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
5884 } else {
5885 ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
5886 // NumberToSmi discards numbers that are not exact integers.
5887 __ CallRuntime(Runtime::kNumberToSmi, 1);
5888 }
5889
5890 // Save the conversion result before the pop instructions below
5891 // have a chance to overwrite it.
5892
danno@chromium.orgc612e022011-11-10 11:38:15 +00005893 __ Move(index_, v0);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005894 __ pop(object_);
5895 // Reload the instance type.
5896 __ lw(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
5897 __ lbu(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
5898 call_helper.AfterCall(masm);
5899 // If index is still not a smi, it must be out of range.
danno@chromium.orgc612e022011-11-10 11:38:15 +00005900 __ JumpIfNotSmi(index_, index_out_of_range_);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005901 // Otherwise, return to the fast path.
5902 __ Branch(&got_smi_index_);
5903
5904 // Call runtime. We get here when the receiver is a string and the
5905 // index is a number, but the code of getting the actual character
5906 // is too complex (e.g., when the string needs to be flattened).
5907 __ bind(&call_runtime_);
5908 call_helper.BeforeCall(masm);
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00005909 __ sll(index_, index_, kSmiTagSize);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005910 __ Push(object_, index_);
5911 __ CallRuntime(Runtime::kStringCharCodeAt, 2);
5912
5913 __ Move(result_, v0);
5914
5915 call_helper.AfterCall(masm);
5916 __ jmp(&exit_);
5917
5918 __ Abort("Unexpected fallthrough from CharCodeAt slow case");
lrn@chromium.org7516f052011-03-30 08:52:27 +00005919}
5920
5921
5922// -------------------------------------------------------------------------
5923// StringCharFromCodeGenerator
5924
5925void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005926 // Fast case of Heap::LookupSingleCharacterStringFromCode.
5927
5928 ASSERT(!t0.is(result_));
5929 ASSERT(!t0.is(code_));
5930
5931 STATIC_ASSERT(kSmiTag == 0);
5932 STATIC_ASSERT(kSmiShiftSize == 0);
jkummerow@chromium.org59297c72013-01-09 16:32:23 +00005933 ASSERT(IsPowerOf2(String::kMaxOneByteCharCode + 1));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005934 __ And(t0,
5935 code_,
5936 Operand(kSmiTagMask |
jkummerow@chromium.org59297c72013-01-09 16:32:23 +00005937 ((~String::kMaxOneByteCharCode) << kSmiTagSize)));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005938 __ Branch(&slow_case_, ne, t0, Operand(zero_reg));
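  // The combined mask rejects, in a single test, values that are not smis
  // (tag bit set) or whose untagged value exceeds String::kMaxOneByteCharCode
  // (high bits set).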
5939
5940 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
5941 // At this point code register contains smi tagged ASCII char code.
5942 STATIC_ASSERT(kSmiTag == 0);
5943 __ sll(t0, code_, kPointerSizeLog2 - kSmiTagSize);
5944 __ Addu(result_, result_, t0);
5945 __ lw(result_, FieldMemOperand(result_, FixedArray::kHeaderSize));
5946 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
5947 __ Branch(&slow_case_, eq, result_, Operand(t0));
5948 __ bind(&exit_);
lrn@chromium.org7516f052011-03-30 08:52:27 +00005949}
5950
5951
5952void StringCharFromCodeGenerator::GenerateSlow(
jkummerow@chromium.orgc3b37122011-11-07 10:14:12 +00005953 MacroAssembler* masm,
5954 const RuntimeCallHelper& call_helper) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005955 __ Abort("Unexpected fallthrough to CharFromCode slow case");
5956
5957 __ bind(&slow_case_);
5958 call_helper.BeforeCall(masm);
5959 __ push(code_);
5960 __ CallRuntime(Runtime::kCharFromCode, 1);
5961 __ Move(result_, v0);
5962
5963 call_helper.AfterCall(masm);
5964 __ Branch(&exit_);
5965
5966 __ Abort("Unexpected fallthrough from CharFromCode slow case");
lrn@chromium.org7516f052011-03-30 08:52:27 +00005967}
5968
5969
5970// -------------------------------------------------------------------------
5971// StringCharAtGenerator
5972
5973void StringCharAtGenerator::GenerateFast(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005974 char_code_at_generator_.GenerateFast(masm);
5975 char_from_code_generator_.GenerateFast(masm);
lrn@chromium.org7516f052011-03-30 08:52:27 +00005976}
5977
5978
5979void StringCharAtGenerator::GenerateSlow(
jkummerow@chromium.orgc3b37122011-11-07 10:14:12 +00005980 MacroAssembler* masm,
5981 const RuntimeCallHelper& call_helper) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005982 char_code_at_generator_.GenerateSlow(masm, call_helper);
5983 char_from_code_generator_.GenerateSlow(masm, call_helper);
lrn@chromium.org7516f052011-03-30 08:52:27 +00005984}
5985
5986
lrn@chromium.org7516f052011-03-30 08:52:27 +00005987void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
5988 Register dest,
5989 Register src,
5990 Register count,
5991 Register scratch,
5992 bool ascii) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00005993 Label loop;
5994 Label done;
5995 // This loop just copies one character at a time, as it is only used for
5996 // very short strings.
5997 if (!ascii) {
5998 __ addu(count, count, count);
5999 }
6000 __ Branch(&done, eq, count, Operand(zero_reg));
6001 __ addu(count, dest, count); // Count now points to the last dest byte.
6002
6003 __ bind(&loop);
6004 __ lbu(scratch, MemOperand(src));
6005 __ addiu(src, src, 1);
6006 __ sb(scratch, MemOperand(dest));
6007 __ addiu(dest, dest, 1);
6008 __ Branch(&loop, lt, dest, Operand(count));
6009
6010 __ bind(&done);
lrn@chromium.org7516f052011-03-30 08:52:27 +00006011}
6012
6013
6014enum CopyCharactersFlags {
6015 COPY_ASCII = 1,
6016 DEST_ALWAYS_ALIGNED = 2
6017};
6018
6019
6020void StringHelper::GenerateCopyCharactersLong(MacroAssembler* masm,
6021 Register dest,
6022 Register src,
6023 Register count,
6024 Register scratch1,
6025 Register scratch2,
6026 Register scratch3,
6027 Register scratch4,
6028 Register scratch5,
6029 int flags) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006030 bool ascii = (flags & COPY_ASCII) != 0;
6031 bool dest_always_aligned = (flags & DEST_ALWAYS_ALIGNED) != 0;
6032
6033 if (dest_always_aligned && FLAG_debug_code) {
6034 // Check that destination is actually word aligned if the flag says
6035 // that it is.
6036 __ And(scratch4, dest, Operand(kPointerAlignmentMask));
6037 __ Check(eq,
6038 "Destination of copy not aligned.",
6039 scratch4,
6040 Operand(zero_reg));
6041 }
6042
6043 const int kReadAlignment = 4;
6044 const int kReadAlignmentMask = kReadAlignment - 1;
6045 // Ensure that reading an entire aligned word containing the last character
6046 // of a string will not read outside the allocated area (because we pad up
6047 // to kObjectAlignment).
6048 STATIC_ASSERT(kObjectAlignment >= kReadAlignment);
6049 // Assumes word reads and writes are little endian.
6050 // Nothing to do for zero characters.
6051 Label done;
6052
6053 if (!ascii) {
6054 __ addu(count, count, count);
6055 }
6056 __ Branch(&done, eq, count, Operand(zero_reg));
6057
6058 Label byte_loop;
6059 // Must copy at least eight bytes, otherwise just do it one byte at a time.
6060 __ Subu(scratch1, count, Operand(8));
6061 __ Addu(count, dest, Operand(count));
6062 Register limit = count; // Read until src equals this.
6063 __ Branch(&byte_loop, lt, scratch1, Operand(zero_reg));
6064
6065 if (!dest_always_aligned) {
6066 // Align dest by byte copying. Copies between zero and three bytes.
6067 __ And(scratch4, dest, Operand(kReadAlignmentMask));
6068 Label dest_aligned;
6069 __ Branch(&dest_aligned, eq, scratch4, Operand(zero_reg));
6070 Label aligned_loop;
6071 __ bind(&aligned_loop);
6072 __ lbu(scratch1, MemOperand(src));
6073 __ addiu(src, src, 1);
6074 __ sb(scratch1, MemOperand(dest));
6075 __ addiu(dest, dest, 1);
6076 __ addiu(scratch4, scratch4, 1);
6077 __ Branch(&aligned_loop, le, scratch4, Operand(kReadAlignmentMask));
6078 __ bind(&dest_aligned);
6079 }
6080
6081 Label simple_loop;
6082
6083 __ And(scratch4, src, Operand(kReadAlignmentMask));
6084 __ Branch(&simple_loop, eq, scratch4, Operand(zero_reg));
6085
6086 // Loop for src/dst that are not aligned the same way.
6087 // This loop uses lwl and lwr instructions. These instructions
6088 // depend on the endianness, and the implementation assumes little-endian.
6089 {
6090 Label loop;
6091 __ bind(&loop);
6092 __ lwr(scratch1, MemOperand(src));
6093 __ Addu(src, src, Operand(kReadAlignment));
6094 __ lwl(scratch1, MemOperand(src, -1));
6095 __ sw(scratch1, MemOperand(dest));
6096 __ Addu(dest, dest, Operand(kReadAlignment));
6097 __ Subu(scratch2, limit, dest);
6098 __ Branch(&loop, ge, scratch2, Operand(kReadAlignment));
6099 }
6100
6101 __ Branch(&byte_loop);
6102
6103 // Simple loop.
6104 // Copy words from src to dest, until less than four bytes left.
6105 // Both src and dest are word aligned.
6106 __ bind(&simple_loop);
6107 {
6108 Label loop;
6109 __ bind(&loop);
6110 __ lw(scratch1, MemOperand(src));
6111 __ Addu(src, src, Operand(kReadAlignment));
6112 __ sw(scratch1, MemOperand(dest));
6113 __ Addu(dest, dest, Operand(kReadAlignment));
6114 __ Subu(scratch2, limit, dest);
6115 __ Branch(&loop, ge, scratch2, Operand(kReadAlignment));
6116 }
6117
6118 // Copy bytes from src to dest until dest hits limit.
6119 __ bind(&byte_loop);
6120 // Test if dest has already reached the limit.
6121 __ Branch(&done, ge, dest, Operand(limit));
6122 __ lbu(scratch1, MemOperand(src));
6123 __ addiu(src, src, 1);
6124 __ sb(scratch1, MemOperand(dest));
6125 __ addiu(dest, dest, 1);
6126 __ Branch(&byte_loop);
6127
6128 __ bind(&done);
lrn@chromium.org7516f052011-03-30 08:52:27 +00006129}
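// ---------------------------------------------------------------------------
// Non-authoritative reference sketch of the copy strategy above, in plain
// C++: short copies fall back to a byte loop, longer copies first align the
// destination, then move whole words (the stub uses lwl/lwr for a misaligned
// source), and finally finish with a byte tail. `bytes` is assumed to already
// account for the string encoding; this is illustration, not the stub itself.
//
//   static void CopyCharsLongSketch(uint8_t* dest, const uint8_t* src,
//                                   int bytes) {
//     if (bytes >= 8) {
//       while ((reinterpret_cast<uintptr_t>(dest) & 3) != 0 && bytes > 0) {
//         *dest++ = *src++;            // Align destination byte by byte.
//         bytes--;
//       }
//       while (bytes >= 4) {           // Word-at-a-time main loop.
//         uint32_t word;
//         memcpy(&word, src, 4);       // Tolerates a misaligned source.
//         memcpy(dest, &word, 4);
//         src += 4;
//         dest += 4;
//         bytes -= 4;
//       }
//     }
//     while (bytes-- > 0) *dest++ = *src++;  // Byte tail (or short copy).
//   }
// ---------------------------------------------------------------------------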
6130
6131
6132void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
6133 Register c1,
6134 Register c2,
6135 Register scratch1,
6136 Register scratch2,
6137 Register scratch3,
6138 Register scratch4,
6139 Register scratch5,
6140 Label* not_found) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006141 // Register scratch3 is the general scratch register in this function.
6142 Register scratch = scratch3;
6143
 6144 // Make sure that both characters are not digits, as such strings have a
 6145 // different hash algorithm. Don't try to look for these in the symbol table.
6146 Label not_array_index;
6147 __ Subu(scratch, c1, Operand(static_cast<int>('0')));
6148 __ Branch(&not_array_index,
6149 Ugreater,
6150 scratch,
6151 Operand(static_cast<int>('9' - '0')));
6152 __ Subu(scratch, c2, Operand(static_cast<int>('0')));
6153
 6154 // If the check failed, combine both characters into a single halfword.
 6155 // This is required by the contract of the method: code at the
 6156 // not_found branch expects this combination in the c1 register.
6157 Label tmp;
6158 __ sll(scratch1, c2, kBitsPerByte);
6159 __ Branch(&tmp, Ugreater, scratch, Operand(static_cast<int>('9' - '0')));
6160 __ Or(c1, c1, scratch1);
6161 __ bind(&tmp);
danno@chromium.org88aa0582012-03-23 15:11:57 +00006162 __ Branch(
6163 not_found, Uless_equal, scratch, Operand(static_cast<int>('9' - '0')));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006164
6165 __ bind(&not_array_index);
6166 // Calculate the two character string hash.
6167 Register hash = scratch1;
6168 StringHelper::GenerateHashInit(masm, hash, c1);
6169 StringHelper::GenerateHashAddCharacter(masm, hash, c2);
6170 StringHelper::GenerateHashGetHash(masm, hash);
6171
6172 // Collect the two characters in a register.
6173 Register chars = c1;
6174 __ sll(scratch, c2, kBitsPerByte);
6175 __ Or(chars, chars, scratch);
6176
6177 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
6178 // hash: hash of two character string.
6179
6180 // Load symbol table.
6181 // Load address of first element of the symbol table.
6182 Register symbol_table = c2;
6183 __ LoadRoot(symbol_table, Heap::kSymbolTableRootIndex);
6184
6185 Register undefined = scratch4;
6186 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
6187
6188 // Calculate capacity mask from the symbol table capacity.
6189 Register mask = scratch2;
6190 __ lw(mask, FieldMemOperand(symbol_table, SymbolTable::kCapacityOffset));
6191 __ sra(mask, mask, 1);
6192 __ Addu(mask, mask, -1);
6193
6194 // Calculate untagged address of the first element of the symbol table.
6195 Register first_symbol_table_element = symbol_table;
6196 __ Addu(first_symbol_table_element, symbol_table,
6197 Operand(SymbolTable::kElementsStartOffset - kHeapObjectTag));
6198
6199 // Registers.
6200 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
6201 // hash: hash of two character string
6202 // mask: capacity mask
6203 // first_symbol_table_element: address of the first element of
6204 // the symbol table
6205 // undefined: the undefined object
6206 // scratch: -
6207
6208 // Perform a number of probes in the symbol table.
fschneider@chromium.org7d10be52012-04-10 12:30:14 +00006209 const int kProbes = 4;
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006210 Label found_in_symbol_table;
6211 Label next_probe[kProbes];
6212 Register candidate = scratch5; // Scratch register contains candidate.
6213 for (int i = 0; i < kProbes; i++) {
6214 // Calculate entry in symbol table.
6215 if (i > 0) {
6216 __ Addu(candidate, hash, Operand(SymbolTable::GetProbeOffset(i)));
6217 } else {
6218 __ mov(candidate, hash);
6219 }
6220
6221 __ And(candidate, candidate, Operand(mask));
6222
 6223 // Load the entry from the symbol table.
6224 STATIC_ASSERT(SymbolTable::kEntrySize == 1);
6225 __ sll(scratch, candidate, kPointerSizeLog2);
6226 __ Addu(scratch, scratch, first_symbol_table_element);
6227 __ lw(candidate, MemOperand(scratch));
6228
 6229 // If the entry is undefined, no string with this hash can be found.
6230 Label is_string;
6231 __ GetObjectType(candidate, scratch, scratch);
6232 __ Branch(&is_string, ne, scratch, Operand(ODDBALL_TYPE));
6233
6234 __ Branch(not_found, eq, undefined, Operand(candidate));
danno@chromium.org2c456792011-11-11 12:00:53 +00006235 // Must be the hole (deleted entry).
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006236 if (FLAG_debug_code) {
danno@chromium.org2c456792011-11-11 12:00:53 +00006237 __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
6238 __ Assert(eq, "oddball in symbol table is not undefined or the hole",
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006239 scratch, Operand(candidate));
6240 }
6241 __ jmp(&next_probe[i]);
6242
6243 __ bind(&is_string);
6244
6245 // Check that the candidate is a non-external ASCII string. The instance
6246 // type is still in the scratch register from the CompareObjectType
6247 // operation.
6248 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &next_probe[i]);
6249
6250 // If length is not 2 the string is not a candidate.
6251 __ lw(scratch, FieldMemOperand(candidate, String::kLengthOffset));
6252 __ Branch(&next_probe[i], ne, scratch, Operand(Smi::FromInt(2)));
6253
6254 // Check if the two characters match.
6255 // Assumes that word load is little endian.
yangguo@chromium.orgfb377212012-11-16 14:43:43 +00006256 __ lhu(scratch, FieldMemOperand(candidate, SeqOneByteString::kHeaderSize));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006257 __ Branch(&found_in_symbol_table, eq, chars, Operand(scratch));
6258 __ bind(&next_probe[i]);
6259 }
6260
6261 // No matching 2 character string found by probing.
6262 __ jmp(not_found);
6263
6264 // Scratch register contains result when we fall through to here.
6265 Register result = candidate;
6266 __ bind(&found_in_symbol_table);
6267 __ mov(v0, result);
lrn@chromium.org7516f052011-03-30 08:52:27 +00006268}
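// ---------------------------------------------------------------------------
// Non-authoritative reference sketch of the probing scheme above. The helper
// names (Probe, IsSeqAsciiString, Length, FirstTwoChars) are stand-ins
// invented for illustration, not V8 APIs; the probe count matches kProbes.
//
//   static Object* LookupTwoCharString(Object** table, uint32_t mask,
//                                      uint32_t hash, uint16_t chars) {
//     for (int i = 0; i < 4 /* kProbes */; i++) {
//       uint32_t index = (hash + Probe(i)) & mask;
//       Object* candidate = table[index];
//       if (candidate == undefined_value) return NULL;  // Definitely absent.
//       if (candidate == the_hole_value) continue;      // Deleted entry.
//       if (IsSeqAsciiString(candidate) && Length(candidate) == 2 &&
//           FirstTwoChars(candidate) == chars) {
//         return candidate;                             // Found a match.
//       }
//     }
//     return NULL;  // Give up after kProbes; the caller allocates instead.
//   }
// ---------------------------------------------------------------------------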
6269
6270
6271void StringHelper::GenerateHashInit(MacroAssembler* masm,
rossberg@chromium.orgfab14982012-01-05 15:02:15 +00006272 Register hash,
6273 Register character) {
6274 // hash = seed + character + ((seed + character) << 10);
erik.corry@gmail.comf2038fb2012-01-16 11:42:08 +00006275 __ LoadRoot(hash, Heap::kHashSeedRootIndex);
rossberg@chromium.orgfab14982012-01-05 15:02:15 +00006276 // Untag smi seed and add the character.
6277 __ SmiUntag(hash);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006278 __ addu(hash, hash, character);
rossberg@chromium.orgfab14982012-01-05 15:02:15 +00006279 __ sll(at, hash, 10);
6280 __ addu(hash, hash, at);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006281 // hash ^= hash >> 6;
danno@chromium.org2c456792011-11-11 12:00:53 +00006282 __ srl(at, hash, 6);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006283 __ xor_(hash, hash, at);
lrn@chromium.org7516f052011-03-30 08:52:27 +00006284}
6285
6286
6287void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
rossberg@chromium.orgfab14982012-01-05 15:02:15 +00006288 Register hash,
6289 Register character) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006290 // hash += character;
6291 __ addu(hash, hash, character);
6292 // hash += hash << 10;
6293 __ sll(at, hash, 10);
6294 __ addu(hash, hash, at);
6295 // hash ^= hash >> 6;
danno@chromium.org2c456792011-11-11 12:00:53 +00006296 __ srl(at, hash, 6);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006297 __ xor_(hash, hash, at);
lrn@chromium.org7516f052011-03-30 08:52:27 +00006298}
6299
6300
6301void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
erik.corry@gmail.comf2038fb2012-01-16 11:42:08 +00006302 Register hash) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006303 // hash += hash << 3;
6304 __ sll(at, hash, 3);
6305 __ addu(hash, hash, at);
6306 // hash ^= hash >> 11;
danno@chromium.org2c456792011-11-11 12:00:53 +00006307 __ srl(at, hash, 11);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006308 __ xor_(hash, hash, at);
6309 // hash += hash << 15;
6310 __ sll(at, hash, 15);
6311 __ addu(hash, hash, at);
6312
erik.corry@gmail.comf2038fb2012-01-16 11:42:08 +00006313 __ li(at, Operand(String::kHashBitMask));
danno@chromium.org2c456792011-11-11 12:00:53 +00006314 __ and_(hash, hash, at);
6315
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006316 // if (hash == 0) hash = 27;
erik.corry@gmail.comf2038fb2012-01-16 11:42:08 +00006317 __ ori(at, zero_reg, StringHasher::kZeroHash);
mstarzinger@chromium.org3233d2f2012-03-14 11:16:03 +00006318 __ Movz(hash, at, hash);
lrn@chromium.org7516f052011-03-30 08:52:27 +00006319}
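// ---------------------------------------------------------------------------
// Non-authoritative reference sketch: taken together, the three helpers above
// compute a seeded add/shift/xor string hash. In scalar C++ the two-character
// case used by the stubs looks roughly like this (`seed` stands for the
// isolate's hash seed; the shift amounts mirror the code emitted above).
//
//   static uint32_t HashTwoChars(uint32_t seed, uint8_t c1, uint8_t c2) {
//     uint32_t hash = seed + c1;
//     hash += hash << 10;
//     hash ^= hash >> 6;        // GenerateHashInit.
//     hash += c2;
//     hash += hash << 10;
//     hash ^= hash >> 6;        // GenerateHashAddCharacter.
//     hash += hash << 3;
//     hash ^= hash >> 11;
//     hash += hash << 15;       // GenerateHashGetHash.
//     hash &= String::kHashBitMask;
//     return hash == 0 ? StringHasher::kZeroHash : hash;
//   }
// ---------------------------------------------------------------------------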
6320
6321
6322void SubStringStub::Generate(MacroAssembler* masm) {
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006323 Label runtime;
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006324 // Stack frame on entry.
6325 // ra: return address
6326 // sp[0]: to
6327 // sp[4]: from
6328 // sp[8]: string
6329
6330 // This stub is called from the native-call %_SubString(...), so
 6331 // nothing can be assumed about the arguments. The stub checks that:
6332 // "string" is a sequential string,
6333 // both "from" and "to" are smis, and
6334 // 0 <= from <= to <= string.length.
6335 // If any of these assumptions fail, we call the runtime system.
6336
fschneider@chromium.org7d10be52012-04-10 12:30:14 +00006337 const int kToOffset = 0 * kPointerSize;
6338 const int kFromOffset = 1 * kPointerSize;
6339 const int kStringOffset = 2 * kPointerSize;
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006340
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006341 __ lw(a2, MemOperand(sp, kToOffset));
6342 __ lw(a3, MemOperand(sp, kFromOffset));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006343 STATIC_ASSERT(kFromOffset == kToOffset + 4);
6344 STATIC_ASSERT(kSmiTag == 0);
6345 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
6346
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006347 // Utilize delay slots. SmiUntag doesn't emit a jump, everything else is
6348 // safe in this case.
ulan@chromium.org65a89c22012-02-14 11:46:07 +00006349 __ UntagAndJumpIfNotSmi(a2, a2, &runtime);
6350 __ UntagAndJumpIfNotSmi(a3, a3, &runtime);
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006351 // Both a2 and a3 are untagged integers.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006352
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006353 __ Branch(&runtime, lt, a3, Operand(zero_reg)); // From < 0.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006354
ulan@chromium.org65a89c22012-02-14 11:46:07 +00006355 __ Branch(&runtime, gt, a3, Operand(a2)); // Fail if from > to.
6356 __ Subu(a2, a2, a3);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006357
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006358 // Make sure first argument is a string.
fschneider@chromium.org1805e212011-09-05 10:49:12 +00006359 __ lw(v0, MemOperand(sp, kStringOffset));
ulan@chromium.org65a89c22012-02-14 11:46:07 +00006360 __ JumpIfSmi(v0, &runtime);
fschneider@chromium.org1805e212011-09-05 10:49:12 +00006361 __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006362 __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
ulan@chromium.org65a89c22012-02-14 11:46:07 +00006363 __ And(t0, a1, Operand(kIsNotStringMask));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006364
ulan@chromium.org65a89c22012-02-14 11:46:07 +00006365 __ Branch(&runtime, ne, t0, Operand(zero_reg));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006366
fschneider@chromium.org1805e212011-09-05 10:49:12 +00006367 // Short-cut for the case of trivial substring.
6368 Label return_v0;
6369 // v0: original string
6370 // a2: result string length
6371 __ lw(t0, FieldMemOperand(v0, String::kLengthOffset));
6372 __ sra(t0, t0, 1);
svenpanne@chromium.orgfb046332012-04-19 12:02:44 +00006373 // Return original string.
fschneider@chromium.org1805e212011-09-05 10:49:12 +00006374 __ Branch(&return_v0, eq, a2, Operand(t0));
svenpanne@chromium.orgfb046332012-04-19 12:02:44 +00006375 // Longer than original string's length or negative: unsafe arguments.
6376 __ Branch(&runtime, hi, a2, Operand(t0));
6377 // Shorter than original string's length: an actual substring.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006378
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006379 // Deal with different string types: update the index if necessary
6380 // and put the underlying string into t1.
6381 // v0: original string
6382 // a1: instance type
6383 // a2: length
6384 // a3: from index (untagged)
6385 Label underlying_unpacked, sliced_string, seq_or_external_string;
6386 // If the string is not indirect, it can only be sequential or external.
6387 STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
6388 STATIC_ASSERT(kIsIndirectStringMask != 0);
6389 __ And(t0, a1, Operand(kIsIndirectStringMask));
6390 __ Branch(USE_DELAY_SLOT, &seq_or_external_string, eq, t0, Operand(zero_reg));
ulan@chromium.org6ff65142012-03-21 09:52:17 +00006391 // t0 is used as a scratch register and can be overwritten in either case.
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006392 __ And(t0, a1, Operand(kSlicedNotConsMask));
6393 __ Branch(&sliced_string, ne, t0, Operand(zero_reg));
6394 // Cons string. Check whether it is flat, then fetch first part.
6395 __ lw(t1, FieldMemOperand(v0, ConsString::kSecondOffset));
6396 __ LoadRoot(t0, Heap::kEmptyStringRootIndex);
6397 __ Branch(&runtime, ne, t1, Operand(t0));
6398 __ lw(t1, FieldMemOperand(v0, ConsString::kFirstOffset));
6399 // Update instance type.
6400 __ lw(a1, FieldMemOperand(t1, HeapObject::kMapOffset));
6401 __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
6402 __ jmp(&underlying_unpacked);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006403
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006404 __ bind(&sliced_string);
6405 // Sliced string. Fetch parent and correct start index by offset.
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006406 __ lw(t1, FieldMemOperand(v0, SlicedString::kParentOffset));
erik.corry@gmail.combbceb572012-03-09 10:52:05 +00006407 __ lw(t0, FieldMemOperand(v0, SlicedString::kOffsetOffset));
danno@chromium.orgfa458e42012-02-01 10:48:36 +00006408 __ sra(t0, t0, 1); // Add offset to index.
6409 __ Addu(a3, a3, t0);
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006410 // Update instance type.
6411 __ lw(a1, FieldMemOperand(t1, HeapObject::kMapOffset));
6412 __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
6413 __ jmp(&underlying_unpacked);
6414
6415 __ bind(&seq_or_external_string);
6416 // Sequential or external string. Just move string to the expected register.
6417 __ mov(t1, v0);
6418
6419 __ bind(&underlying_unpacked);
6420
6421 if (FLAG_string_slices) {
6422 Label copy_routine;
6423 // t1: underlying subject string
6424 // a1: instance type of underlying subject string
6425 // a2: length
6426 // a3: adjusted start index (untagged)
6427 // Short slice. Copy instead of slicing.
6428 __ Branch(&copy_routine, lt, a2, Operand(SlicedString::kMinLength));
6429 // Allocate new sliced string. At this point we do not reload the instance
6430 // type including the string encoding because we simply rely on the info
6431 // provided by the original string. It does not matter if the original
6432 // string's encoding is wrong because we always have to recheck encoding of
6433 // the newly created string's parent anyways due to externalized strings.
6434 Label two_byte_slice, set_slice_header;
mvstanton@chromium.orge4ac3ef2012-11-12 14:53:34 +00006435 STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006436 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
6437 __ And(t0, a1, Operand(kStringEncodingMask));
6438 __ Branch(&two_byte_slice, eq, t0, Operand(zero_reg));
6439 __ AllocateAsciiSlicedString(v0, a2, t2, t3, &runtime);
6440 __ jmp(&set_slice_header);
6441 __ bind(&two_byte_slice);
6442 __ AllocateTwoByteSlicedString(v0, a2, t2, t3, &runtime);
6443 __ bind(&set_slice_header);
6444 __ sll(a3, a3, 1);
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006445 __ sw(t1, FieldMemOperand(v0, SlicedString::kParentOffset));
erik.corry@gmail.combbceb572012-03-09 10:52:05 +00006446 __ sw(a3, FieldMemOperand(v0, SlicedString::kOffsetOffset));
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006447 __ jmp(&return_v0);
6448
6449 __ bind(&copy_routine);
6450 }
6451
6452 // t1: underlying subject string
6453 // a1: instance type of underlying subject string
6454 // a2: length
6455 // a3: adjusted start index (untagged)
6456 Label two_byte_sequential, sequential_string, allocate_result;
6457 STATIC_ASSERT(kExternalStringTag != 0);
6458 STATIC_ASSERT(kSeqStringTag == 0);
6459 __ And(t0, a1, Operand(kExternalStringTag));
6460 __ Branch(&sequential_string, eq, t0, Operand(zero_reg));
6461
6462 // Handle external string.
6463 // Rule out short external strings.
6464 STATIC_CHECK(kShortExternalStringTag != 0);
6465 __ And(t0, a1, Operand(kShortExternalStringTag));
6466 __ Branch(&runtime, ne, t0, Operand(zero_reg));
6467 __ lw(t1, FieldMemOperand(t1, ExternalString::kResourceDataOffset));
6468 // t1 already points to the first character of underlying string.
6469 __ jmp(&allocate_result);
6470
6471 __ bind(&sequential_string);
6472 // Locate first character of underlying subject string.
yangguo@chromium.orgfb377212012-11-16 14:43:43 +00006473 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
6474 __ Addu(t1, t1, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006475
6476 __ bind(&allocate_result);
 6477 // Sequential ASCII string. Allocate the result.
mvstanton@chromium.orge4ac3ef2012-11-12 14:53:34 +00006478 STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006479 __ And(t0, a1, Operand(kStringEncodingMask));
6480 __ Branch(&two_byte_sequential, eq, t0, Operand(zero_reg));
6481
ulan@chromium.org2efb9002012-01-19 15:36:35 +00006482 // Allocate and copy the resulting ASCII string.
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006483 __ AllocateAsciiString(v0, a2, t0, t2, t3, &runtime);
6484
6485 // Locate first character of substring to copy.
6486 __ Addu(t1, t1, a3);
6487
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006488 // Locate first character of result.
yangguo@chromium.orgfb377212012-11-16 14:43:43 +00006489 __ Addu(a1, v0, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006490
fschneider@chromium.org1805e212011-09-05 10:49:12 +00006491 // v0: result string
6492 // a1: first character of result string
6493 // a2: result string length
6494 // t1: first character of substring to copy
yangguo@chromium.orgfb377212012-11-16 14:43:43 +00006495 STATIC_ASSERT((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006496 StringHelper::GenerateCopyCharactersLong(
6497 masm, a1, t1, a2, a3, t0, t2, t3, t4, COPY_ASCII | DEST_ALWAYS_ALIGNED);
fschneider@chromium.org1805e212011-09-05 10:49:12 +00006498 __ jmp(&return_v0);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006499
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006500 // Allocate and copy the resulting two-byte string.
6501 __ bind(&two_byte_sequential);
6502 __ AllocateTwoByteString(v0, a2, t0, t2, t3, &runtime);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006503
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006504 // Locate first character of substring to copy.
fschneider@chromium.org1805e212011-09-05 10:49:12 +00006505 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006506 __ sll(t0, a3, 1);
6507 __ Addu(t1, t1, t0);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006508 // Locate first character of result.
6509 __ Addu(a1, v0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
fschneider@chromium.org1805e212011-09-05 10:49:12 +00006510
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006511 // v0: result string.
6512 // a1: first character of result.
6513 // a2: result length.
fschneider@chromium.org1805e212011-09-05 10:49:12 +00006514 // t1: first character of substring to copy.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006515 STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
6516 StringHelper::GenerateCopyCharactersLong(
6517 masm, a1, t1, a2, a3, t0, t2, t3, t4, DEST_ALWAYS_ALIGNED);
fschneider@chromium.org1805e212011-09-05 10:49:12 +00006518
6519 __ bind(&return_v0);
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006520 Counters* counters = masm->isolate()->counters();
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006521 __ IncrementCounter(counters->sub_string_native(), 1, a3, t0);
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006522 __ DropAndRet(3);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006523
6524 // Just jump to runtime to create the sub string.
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006525 __ bind(&runtime);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006526 __ TailCallRuntime(Runtime::kSubString, 3, 1);
6527}
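// ---------------------------------------------------------------------------
// Non-authoritative reference sketch of the fast-path decisions made above.
// Helper names are invented for illustration; only the constants and flags
// come from the stub.
//
//   static Object* SubStringFast(String* str, int from, int to) {
//     if (from < 0 || from > to) return CallRuntime();   // Unsafe arguments.
//     int length = to - from;
//     if (length == str->length()) return str;           // Trivial substring.
//     if (length > str->length()) return CallRuntime();
//     String* underlying = UnpackConsOrSliced(str, &from);  // Adjusts `from`.
//     if (FLAG_string_slices && length >= SlicedString::kMinLength) {
//       return NewSlicedString(underlying, from, length);   // Share storage.
//     }
//     return CopyCharsToNewString(underlying, from, length);  // Short: copy.
//   }
// ---------------------------------------------------------------------------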
6528
6529
6530void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm,
6531 Register left,
6532 Register right,
6533 Register scratch1,
6534 Register scratch2,
6535 Register scratch3) {
6536 Register length = scratch1;
6537
6538 // Compare lengths.
6539 Label strings_not_equal, check_zero_length;
6540 __ lw(length, FieldMemOperand(left, String::kLengthOffset));
6541 __ lw(scratch2, FieldMemOperand(right, String::kLengthOffset));
6542 __ Branch(&check_zero_length, eq, length, Operand(scratch2));
6543 __ bind(&strings_not_equal);
6544 __ li(v0, Operand(Smi::FromInt(NOT_EQUAL)));
6545 __ Ret();
6546
6547 // Check if the length is zero.
6548 Label compare_chars;
6549 __ bind(&check_zero_length);
6550 STATIC_ASSERT(kSmiTag == 0);
6551 __ Branch(&compare_chars, ne, length, Operand(zero_reg));
6552 __ li(v0, Operand(Smi::FromInt(EQUAL)));
6553 __ Ret();
6554
6555 // Compare characters.
6556 __ bind(&compare_chars);
6557
6558 GenerateAsciiCharsCompareLoop(masm,
6559 left, right, length, scratch2, scratch3, v0,
6560 &strings_not_equal);
6561
6562 // Characters are equal.
6563 __ li(v0, Operand(Smi::FromInt(EQUAL)));
6564 __ Ret();
lrn@chromium.org7516f052011-03-30 08:52:27 +00006565}
6566
6567
6568void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
lrn@chromium.org7516f052011-03-30 08:52:27 +00006569 Register left,
karlklose@chromium.org83a47282011-05-11 11:54:09 +00006570 Register right,
lrn@chromium.org7516f052011-03-30 08:52:27 +00006571 Register scratch1,
6572 Register scratch2,
6573 Register scratch3,
6574 Register scratch4) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006575 Label result_not_equal, compare_lengths;
6576 // Find minimum length and length difference.
6577 __ lw(scratch1, FieldMemOperand(left, String::kLengthOffset));
6578 __ lw(scratch2, FieldMemOperand(right, String::kLengthOffset));
6579 __ Subu(scratch3, scratch1, Operand(scratch2));
6580 Register length_delta = scratch3;
6581 __ slt(scratch4, scratch2, scratch1);
mstarzinger@chromium.org3233d2f2012-03-14 11:16:03 +00006582 __ Movn(scratch1, scratch2, scratch4);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006583 Register min_length = scratch1;
6584 STATIC_ASSERT(kSmiTag == 0);
6585 __ Branch(&compare_lengths, eq, min_length, Operand(zero_reg));
6586
6587 // Compare loop.
6588 GenerateAsciiCharsCompareLoop(masm,
6589 left, right, min_length, scratch2, scratch4, v0,
6590 &result_not_equal);
6591
6592 // Compare lengths - strings up to min-length are equal.
6593 __ bind(&compare_lengths);
6594 ASSERT(Smi::FromInt(EQUAL) == static_cast<Smi*>(0));
6595 // Use length_delta as result if it's zero.
6596 __ mov(scratch2, length_delta);
6597 __ mov(scratch4, zero_reg);
6598 __ mov(v0, zero_reg);
6599
6600 __ bind(&result_not_equal);
6601 // Conditionally update the result based either on length_delta or
 6602 // the last comparison performed in the loop above.
6603 Label ret;
6604 __ Branch(&ret, eq, scratch2, Operand(scratch4));
6605 __ li(v0, Operand(Smi::FromInt(GREATER)));
6606 __ Branch(&ret, gt, scratch2, Operand(scratch4));
6607 __ li(v0, Operand(Smi::FromInt(LESS)));
6608 __ bind(&ret);
6609 __ Ret();
6610}
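// ---------------------------------------------------------------------------
// Non-authoritative reference sketch of the ordering logic above: compare the
// first min(llen, rlen) characters, and only fall back to the length
// difference when that prefix is equal. LESS/EQUAL/GREATER are the usual
// comparison results.
//
//   static int CompareFlatAscii(const uint8_t* l, int llen,
//                               const uint8_t* r, int rlen) {
//     int min_length = llen < rlen ? llen : rlen;
//     for (int i = 0; i < min_length; i++) {
//       if (l[i] != r[i]) return l[i] < r[i] ? LESS : GREATER;
//     }
//     if (llen == rlen) return EQUAL;
//     return llen < rlen ? LESS : GREATER;
//   }
// ---------------------------------------------------------------------------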
6611
6612
6613void StringCompareStub::GenerateAsciiCharsCompareLoop(
6614 MacroAssembler* masm,
6615 Register left,
6616 Register right,
6617 Register length,
6618 Register scratch1,
6619 Register scratch2,
6620 Register scratch3,
6621 Label* chars_not_equal) {
6622 // Change index to run from -length to -1 by adding length to string
6623 // start. This means that loop ends when index reaches zero, which
6624 // doesn't need an additional compare.
6625 __ SmiUntag(length);
6626 __ Addu(scratch1, length,
yangguo@chromium.orgfb377212012-11-16 14:43:43 +00006627 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006628 __ Addu(left, left, Operand(scratch1));
6629 __ Addu(right, right, Operand(scratch1));
6630 __ Subu(length, zero_reg, length);
6631 Register index = length; // index = -length;
6632
6633
6634 // Compare loop.
6635 Label loop;
6636 __ bind(&loop);
6637 __ Addu(scratch3, left, index);
6638 __ lbu(scratch1, MemOperand(scratch3));
6639 __ Addu(scratch3, right, index);
6640 __ lbu(scratch2, MemOperand(scratch3));
6641 __ Branch(chars_not_equal, ne, scratch1, Operand(scratch2));
6642 __ Addu(index, index, 1);
6643 __ Branch(&loop, ne, index, Operand(zero_reg));
lrn@chromium.org7516f052011-03-30 08:52:27 +00006644}
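// ---------------------------------------------------------------------------
// Non-authoritative reference sketch of the indexing trick above: both string
// cursors are advanced to one-past-the-end and the loop runs a negative index
// up to zero, so the loop-termination test doubles as the bounds check.
//
//   const uint8_t* left_end = left + length;
//   const uint8_t* right_end = right + length;
//   for (int i = -length; i != 0; i++) {
//     if (left_end[i] != right_end[i]) {
//       // chars_not_equal
//     }
//   }
// ---------------------------------------------------------------------------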
6645
6646
6647void StringCompareStub::Generate(MacroAssembler* masm) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006648 Label runtime;
6649
6650 Counters* counters = masm->isolate()->counters();
6651
6652 // Stack frame on entry.
6653 // sp[0]: right string
6654 // sp[4]: left string
6655 __ lw(a1, MemOperand(sp, 1 * kPointerSize)); // Left.
6656 __ lw(a0, MemOperand(sp, 0 * kPointerSize)); // Right.
6657
6658 Label not_same;
6659 __ Branch(&not_same, ne, a0, Operand(a1));
6660 STATIC_ASSERT(EQUAL == 0);
6661 STATIC_ASSERT(kSmiTag == 0);
6662 __ li(v0, Operand(Smi::FromInt(EQUAL)));
6663 __ IncrementCounter(counters->string_compare_native(), 1, a1, a2);
ulan@chromium.org6ff65142012-03-21 09:52:17 +00006664 __ DropAndRet(2);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006665
6666 __ bind(&not_same);
6667
6668 // Check that both objects are sequential ASCII strings.
6669 __ JumpIfNotBothSequentialAsciiStrings(a1, a0, a2, a3, &runtime);
6670
6671 // Compare flat ASCII strings natively. Remove arguments from stack first.
6672 __ IncrementCounter(counters->string_compare_native(), 1, a2, a3);
6673 __ Addu(sp, sp, Operand(2 * kPointerSize));
6674 GenerateCompareFlatAsciiStrings(masm, a1, a0, a2, a3, t0, t1);
6675
6676 __ bind(&runtime);
6677 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
lrn@chromium.org7516f052011-03-30 08:52:27 +00006678}
6679
6680
6681void StringAddStub::Generate(MacroAssembler* masm) {
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006682 Label call_runtime, call_builtin;
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006683 Builtins::JavaScript builtin_id = Builtins::ADD;
6684
6685 Counters* counters = masm->isolate()->counters();
6686
6687 // Stack on entry:
6688 // sp[0]: second argument (right).
6689 // sp[4]: first argument (left).
6690
6691 // Load the two arguments.
6692 __ lw(a0, MemOperand(sp, 1 * kPointerSize)); // First argument.
6693 __ lw(a1, MemOperand(sp, 0 * kPointerSize)); // Second argument.
6694
6695 // Make sure that both arguments are strings if not known in advance.
6696 if (flags_ == NO_STRING_ADD_FLAGS) {
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006697 __ JumpIfEitherSmi(a0, a1, &call_runtime);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006698 // Load instance types.
6699 __ lw(t0, FieldMemOperand(a0, HeapObject::kMapOffset));
6700 __ lw(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
6701 __ lbu(t0, FieldMemOperand(t0, Map::kInstanceTypeOffset));
6702 __ lbu(t1, FieldMemOperand(t1, Map::kInstanceTypeOffset));
6703 STATIC_ASSERT(kStringTag == 0);
6704 // If either is not a string, go to runtime.
6705 __ Or(t4, t0, Operand(t1));
6706 __ And(t4, t4, Operand(kIsNotStringMask));
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006707 __ Branch(&call_runtime, ne, t4, Operand(zero_reg));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006708 } else {
6709 // Here at least one of the arguments is definitely a string.
6710 // We convert the one that is not known to be a string.
6711 if ((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) == 0) {
6712 ASSERT((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) != 0);
6713 GenerateConvertArgument(
6714 masm, 1 * kPointerSize, a0, a2, a3, t0, t1, &call_builtin);
6715 builtin_id = Builtins::STRING_ADD_RIGHT;
6716 } else if ((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) == 0) {
6717 ASSERT((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) != 0);
6718 GenerateConvertArgument(
6719 masm, 0 * kPointerSize, a1, a2, a3, t0, t1, &call_builtin);
6720 builtin_id = Builtins::STRING_ADD_LEFT;
6721 }
6722 }
6723
6724 // Both arguments are strings.
6725 // a0: first string
6726 // a1: second string
6727 // t0: first string instance type (if flags_ == NO_STRING_ADD_FLAGS)
6728 // t1: second string instance type (if flags_ == NO_STRING_ADD_FLAGS)
6729 {
6730 Label strings_not_empty;
6731 // Check if either of the strings are empty. In that case return the other.
 6732 // These tests use a zero-length check on the string length, which is a Smi.
6733 // Assert that Smi::FromInt(0) is really 0.
6734 STATIC_ASSERT(kSmiTag == 0);
6735 ASSERT(Smi::FromInt(0) == 0);
6736 __ lw(a2, FieldMemOperand(a0, String::kLengthOffset));
6737 __ lw(a3, FieldMemOperand(a1, String::kLengthOffset));
6738 __ mov(v0, a0); // Assume we'll return first string (from a0).
mstarzinger@chromium.org3233d2f2012-03-14 11:16:03 +00006739 __ Movz(v0, a1, a2); // If first is empty, return second (from a1).
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006740 __ slt(t4, zero_reg, a2); // if (a2 > 0) t4 = 1.
6741 __ slt(t5, zero_reg, a3); // if (a3 > 0) t5 = 1.
6742 __ and_(t4, t4, t5); // Branch if both strings were non-empty.
6743 __ Branch(&strings_not_empty, ne, t4, Operand(zero_reg));
6744
6745 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006746 __ DropAndRet(2);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006747
6748 __ bind(&strings_not_empty);
6749 }
6750
6751 // Untag both string-lengths.
6752 __ sra(a2, a2, kSmiTagSize);
6753 __ sra(a3, a3, kSmiTagSize);
6754
6755 // Both strings are non-empty.
6756 // a0: first string
6757 // a1: second string
6758 // a2: length of first string
6759 // a3: length of second string
6760 // t0: first string instance type (if flags_ == NO_STRING_ADD_FLAGS)
6761 // t1: second string instance type (if flags_ == NO_STRING_ADD_FLAGS)
6762 // Look at the length of the result of adding the two strings.
6763 Label string_add_flat_result, longer_than_two;
6764 // Adding two lengths can't overflow.
6765 STATIC_ASSERT(String::kMaxLength < String::kMaxLength * 2);
6766 __ Addu(t2, a2, Operand(a3));
 6767 // Use the symbol table when adding two one-character strings, as it
6768 // helps later optimizations to return a symbol here.
6769 __ Branch(&longer_than_two, ne, t2, Operand(2));
6770
6771 // Check that both strings are non-external ASCII strings.
6772 if (flags_ != NO_STRING_ADD_FLAGS) {
6773 __ lw(t0, FieldMemOperand(a0, HeapObject::kMapOffset));
6774 __ lw(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
6775 __ lbu(t0, FieldMemOperand(t0, Map::kInstanceTypeOffset));
6776 __ lbu(t1, FieldMemOperand(t1, Map::kInstanceTypeOffset));
6777 }
6778 __ JumpIfBothInstanceTypesAreNotSequentialAscii(t0, t1, t2, t3,
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006779 &call_runtime);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006780
6781 // Get the two characters forming the sub string.
yangguo@chromium.orgfb377212012-11-16 14:43:43 +00006782 __ lbu(a2, FieldMemOperand(a0, SeqOneByteString::kHeaderSize));
6783 __ lbu(a3, FieldMemOperand(a1, SeqOneByteString::kHeaderSize));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006784
 6785 // Try to look up the two-character string in the symbol table. If it is
 6786 // not found, just allocate a new one.
6787 Label make_two_character_string;
6788 StringHelper::GenerateTwoCharacterSymbolTableProbe(
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006789 masm, a2, a3, t2, t3, t0, t1, t5, &make_two_character_string);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006790 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006791 __ DropAndRet(2);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006792
6793 __ bind(&make_two_character_string);
 6794 // The resulting string has length 2, and the first characters of the two
 6795 // strings are combined into a single halfword in the a2 register.
 6796 // So we can fill the resulting string with a single halfword store
 6797 // instruction (which assumes that the processor is in little-endian
 6798 // mode) instead of two loops.
6799 __ li(t2, Operand(2));
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006800 __ AllocateAsciiString(v0, t2, t0, t1, t5, &call_runtime);
yangguo@chromium.orgfb377212012-11-16 14:43:43 +00006801 __ sh(a2, FieldMemOperand(v0, SeqOneByteString::kHeaderSize));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006802 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006803 __ DropAndRet(2);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006804
6805 __ bind(&longer_than_two);
6806 // Check if resulting string will be flat.
danno@chromium.org88aa0582012-03-23 15:11:57 +00006807 __ Branch(&string_add_flat_result, lt, t2, Operand(ConsString::kMinLength));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006808 // Handle exceptionally long strings in the runtime system.
6809 STATIC_ASSERT((String::kMaxLength & 0x80000000) == 0);
6810 ASSERT(IsPowerOf2(String::kMaxLength + 1));
6811 // kMaxLength + 1 is representable as shifted literal, kMaxLength is not.
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006812 __ Branch(&call_runtime, hs, t2, Operand(String::kMaxLength + 1));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006813
6814 // If result is not supposed to be flat, allocate a cons string object.
6815 // If both strings are ASCII the result is an ASCII cons string.
6816 if (flags_ != NO_STRING_ADD_FLAGS) {
6817 __ lw(t0, FieldMemOperand(a0, HeapObject::kMapOffset));
6818 __ lw(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
6819 __ lbu(t0, FieldMemOperand(t0, Map::kInstanceTypeOffset));
6820 __ lbu(t1, FieldMemOperand(t1, Map::kInstanceTypeOffset));
6821 }
6822 Label non_ascii, allocated, ascii_data;
6823 STATIC_ASSERT(kTwoByteStringTag == 0);
ulan@chromium.org2efb9002012-01-19 15:36:35 +00006824 // Branch to non_ascii if either string-encoding field is zero (non-ASCII).
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006825 __ And(t4, t0, Operand(t1));
6826 __ And(t4, t4, Operand(kStringEncodingMask));
6827 __ Branch(&non_ascii, eq, t4, Operand(zero_reg));
6828
6829 // Allocate an ASCII cons string.
6830 __ bind(&ascii_data);
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006831 __ AllocateAsciiConsString(v0, t2, t0, t1, &call_runtime);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006832 __ bind(&allocated);
6833 // Fill the fields of the cons string.
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006834 __ sw(a0, FieldMemOperand(v0, ConsString::kFirstOffset));
6835 __ sw(a1, FieldMemOperand(v0, ConsString::kSecondOffset));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006836 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006837 __ DropAndRet(2);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006838
6839 __ bind(&non_ascii);
6840 // At least one of the strings is two-byte. Check whether it happens
6841 // to contain only ASCII characters.
6842 // t0: first instance type.
6843 // t1: second instance type.
 6844 // Branch to ascii_data if _both_ instances have the ASCII data hint set.
6845 __ And(at, t0, Operand(kAsciiDataHintMask));
6846 __ and_(at, at, t1);
6847 __ Branch(&ascii_data, ne, at, Operand(zero_reg));
yangguo@chromium.org46a2a512013-01-18 16:29:40 +00006848 __ Xor(t0, t0, Operand(t1));
6849 STATIC_ASSERT(kOneByteStringTag != 0 && kAsciiDataHintTag != 0);
6850 __ And(t0, t0, Operand(kOneByteStringTag | kAsciiDataHintTag));
6851 __ Branch(&ascii_data, eq, t0,
6852 Operand(kOneByteStringTag | kAsciiDataHintTag));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006853
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006854 // Allocate a two byte cons string.
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006855 __ AllocateTwoByteConsString(v0, t2, t0, t1, &call_runtime);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006856 __ Branch(&allocated);
6857
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006858 // We cannot encounter sliced strings or cons strings here since:
ulan@chromium.org2efb9002012-01-19 15:36:35 +00006859 STATIC_ASSERT(SlicedString::kMinLength >= ConsString::kMinLength);
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006860 // Handle creating a flat result from either external or sequential strings.
6861 // Locate the first characters' locations.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006862 // a0: first string
6863 // a1: second string
6864 // a2: length of first string
6865 // a3: length of second string
6866 // t0: first string instance type (if flags_ == NO_STRING_ADD_FLAGS)
6867 // t1: second string instance type (if flags_ == NO_STRING_ADD_FLAGS)
6868 // t2: sum of lengths.
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006869 Label first_prepared, second_prepared;
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006870 __ bind(&string_add_flat_result);
6871 if (flags_ != NO_STRING_ADD_FLAGS) {
6872 __ lw(t0, FieldMemOperand(a0, HeapObject::kMapOffset));
6873 __ lw(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
6874 __ lbu(t0, FieldMemOperand(t0, Map::kInstanceTypeOffset));
6875 __ lbu(t1, FieldMemOperand(t1, Map::kInstanceTypeOffset));
6876 }
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006877 // Check whether both strings have the same encoding.
6878 __ Xor(t3, t0, Operand(t1));
6879 __ And(t3, t3, Operand(kStringEncodingMask));
6880 __ Branch(&call_runtime, ne, t3, Operand(zero_reg));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006881
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006882 STATIC_ASSERT(kSeqStringTag == 0);
6883 __ And(t4, t0, Operand(kStringRepresentationMask));
6884
yangguo@chromium.orgfb377212012-11-16 14:43:43 +00006885 STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006886 Label skip_first_add;
6887 __ Branch(&skip_first_add, ne, t4, Operand(zero_reg));
6888 __ Branch(USE_DELAY_SLOT, &first_prepared);
yangguo@chromium.orgfb377212012-11-16 14:43:43 +00006889 __ addiu(t3, a0, SeqOneByteString::kHeaderSize - kHeapObjectTag);
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006890 __ bind(&skip_first_add);
6891 // External string: rule out short external string and load string resource.
6892 STATIC_ASSERT(kShortExternalStringTag != 0);
6893 __ And(t4, t0, Operand(kShortExternalStringMask));
6894 __ Branch(&call_runtime, ne, t4, Operand(zero_reg));
6895 __ lw(t3, FieldMemOperand(a0, ExternalString::kResourceDataOffset));
6896 __ bind(&first_prepared);
6897
6898 STATIC_ASSERT(kSeqStringTag == 0);
6899 __ And(t4, t1, Operand(kStringRepresentationMask));
yangguo@chromium.orgfb377212012-11-16 14:43:43 +00006900 STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006901 Label skip_second_add;
6902 __ Branch(&skip_second_add, ne, t4, Operand(zero_reg));
6903 __ Branch(USE_DELAY_SLOT, &second_prepared);
yangguo@chromium.orgfb377212012-11-16 14:43:43 +00006904 __ addiu(a1, a1, SeqOneByteString::kHeaderSize - kHeapObjectTag);
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006905 __ bind(&skip_second_add);
6906 // External string: rule out short external string and load string resource.
6907 STATIC_ASSERT(kShortExternalStringTag != 0);
6908 __ And(t4, t1, Operand(kShortExternalStringMask));
6909 __ Branch(&call_runtime, ne, t4, Operand(zero_reg));
6910 __ lw(a1, FieldMemOperand(a1, ExternalString::kResourceDataOffset));
6911 __ bind(&second_prepared);
6912
6913 Label non_ascii_string_add_flat_result;
6914 // t3: first character of first string
6915 // a1: first character of second string
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006916 // a2: length of first string
6917 // a3: length of second string
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006918 // t2: sum of lengths.
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006919 // Both strings have the same encoding.
6920 STATIC_ASSERT(kTwoByteStringTag == 0);
6921 __ And(t4, t1, Operand(kStringEncodingMask));
6922 __ Branch(&non_ascii_string_add_flat_result, eq, t4, Operand(zero_reg));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006923
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006924 __ AllocateAsciiString(v0, t2, t0, t1, t5, &call_runtime);
yangguo@chromium.orgfb377212012-11-16 14:43:43 +00006925 __ Addu(t2, v0, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006926 // v0: result string.
6927 // t3: first character of first string.
6928 // a1: first character of second string
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006929 // a2: length of first string.
6930 // a3: length of second string.
6931 // t2: first character of result.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006932
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006933 StringHelper::GenerateCopyCharacters(masm, t2, t3, a2, t0, true);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006934 // t2: next character of result.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006935 StringHelper::GenerateCopyCharacters(masm, t2, a1, a3, t0, true);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006936 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006937 __ DropAndRet(2);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006938
6939 __ bind(&non_ascii_string_add_flat_result);
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006940 __ AllocateTwoByteString(v0, t2, t0, t1, t5, &call_runtime);
6941 __ Addu(t2, v0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
6942 // v0: result string.
6943 // t3: first character of first string.
6944 // a1: first character of second string.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006945 // a2: length of first string.
6946 // a3: length of second string.
6947 // t2: first character of result.
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006948 StringHelper::GenerateCopyCharacters(masm, t2, t3, a2, t0, false);
6949 // t2: next character of result.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006950 StringHelper::GenerateCopyCharacters(masm, t2, a1, a3, t0, false);
6951
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006952 __ IncrementCounter(counters->string_add_native(), 1, a2, a3);
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006953 __ DropAndRet(2);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006954
6955 // Just jump to runtime to add the two strings.
ricow@chromium.org7ad65222011-12-19 12:13:11 +00006956 __ bind(&call_runtime);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00006957 __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
6958
6959 if (call_builtin.is_linked()) {
6960 __ bind(&call_builtin);
6961 __ InvokeBuiltin(builtin_id, JUMP_FUNCTION);
6962 }
6963}
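// ---------------------------------------------------------------------------
// Non-authoritative reference sketch of the fast-path decision tree above.
// Helper names are invented for illustration; the thresholds are the ones the
// stub tests against.
//
//   static Object* AddStringsFast(String* a, String* b) {
//     if (a->length() == 0) return b;                   // Return the other
//     if (b->length() == 0) return a;                   // string unchanged.
//     int length = a->length() + b->length();
//     if (length == 2) return LookupOrMakeTwoCharString(a, b);
//     if (length < ConsString::kMinLength) {
//       return AllocateAndCopyFlat(a, b);               // Short: flat copy.
//     }
//     if (length > String::kMaxLength) return CallRuntime();
//     return NewConsString(a, b);                       // No copying needed.
//   }
// ---------------------------------------------------------------------------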
6964
6965
6966void StringAddStub::GenerateConvertArgument(MacroAssembler* masm,
6967 int stack_offset,
6968 Register arg,
6969 Register scratch1,
6970 Register scratch2,
6971 Register scratch3,
6972 Register scratch4,
6973 Label* slow) {
6974 // First check if the argument is already a string.
6975 Label not_string, done;
6976 __ JumpIfSmi(arg, &not_string);
6977 __ GetObjectType(arg, scratch1, scratch1);
6978 __ Branch(&done, lt, scratch1, Operand(FIRST_NONSTRING_TYPE));
6979
6980 // Check the number to string cache.
6981 Label not_cached;
6982 __ bind(&not_string);
6983 // Puts the cached result into scratch1.
6984 NumberToStringStub::GenerateLookupNumberStringCache(masm,
6985 arg,
6986 scratch1,
6987 scratch2,
6988 scratch3,
6989 scratch4,
6990 false,
6991 &not_cached);
6992 __ mov(arg, scratch1);
6993 __ sw(arg, MemOperand(sp, stack_offset));
6994 __ jmp(&done);
6995
6996 // Check if the argument is a safe string wrapper.
6997 __ bind(&not_cached);
6998 __ JumpIfSmi(arg, slow);
6999 __ GetObjectType(arg, scratch1, scratch2); // map -> scratch1.
7000 __ Branch(slow, ne, scratch2, Operand(JS_VALUE_TYPE));
7001 __ lbu(scratch2, FieldMemOperand(scratch1, Map::kBitField2Offset));
7002 __ li(scratch4, 1 << Map::kStringWrapperSafeForDefaultValueOf);
7003 __ And(scratch2, scratch2, scratch4);
7004 __ Branch(slow, ne, scratch2, Operand(scratch4));
7005 __ lw(arg, FieldMemOperand(arg, JSValue::kValueOffset));
7006 __ sw(arg, MemOperand(sp, stack_offset));
7007
7008 __ bind(&done);
lrn@chromium.org7516f052011-03-30 08:52:27 +00007009}
7010
7011
7012void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00007013 ASSERT(state_ == CompareIC::SMI);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007014 Label miss;
7015 __ Or(a2, a1, a0);
7016 __ JumpIfNotSmi(a2, &miss);
7017
7018 if (GetCondition() == eq) {
7019 // For equality we do not care about the sign of the result.
7020 __ Subu(v0, a0, a1);
7021 } else {
7022 // Untag before subtracting to avoid handling overflow.
7023 __ SmiUntag(a1);
7024 __ SmiUntag(a0);
7025 __ Subu(v0, a1, a0);
7026 }
7027 __ Ret();
7028
7029 __ bind(&miss);
7030 GenerateMiss(masm);
lrn@chromium.org7516f052011-03-30 08:52:27 +00007031}
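// ---------------------------------------------------------------------------
// Non-authoritative reference sketch: for two smis the compare IC result is a
// subtraction whose sign encodes the ordering. For equality the tagged values
// can be subtracted directly; for ordering they are untagged first so the
// subtraction cannot overflow.
//
//   static int32_t CompareSmis(int32_t lhs, int32_t rhs, bool equality) {
//     if (equality) return rhs - lhs;       // Zero iff the smis are equal.
//     return (lhs >> 1) - (rhs >> 1);       // Untag, then sign gives order.
//   }
// ---------------------------------------------------------------------------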
7032
7033
7034void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00007035 ASSERT(state_ == CompareIC::HEAP_NUMBER);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007036
7037 Label generic_stub;
ulan@chromium.org9a21ec42012-03-06 08:42:24 +00007038 Label unordered, maybe_undefined1, maybe_undefined2;
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007039 Label miss;
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007040
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00007041 if (left_ == CompareIC::SMI) {
7042 __ JumpIfNotSmi(a1, &miss);
7043 }
7044 if (right_ == CompareIC::SMI) {
7045 __ JumpIfNotSmi(a0, &miss);
7046 }
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007047
7048 // Inlining the double comparison and falling back to the general compare
7049 // stub if NaN is involved or FPU is unsupported.
7050 if (CpuFeatures::IsSupported(FPU)) {
7051 CpuFeatures::Scope scope(FPU);
7052
7053 // Load left and right operand.
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00007054 Label done, left, left_smi, right_smi;
7055 __ JumpIfSmi(a0, &right_smi);
7056 __ CheckMap(a0, a2, Heap::kHeapNumberMapRootIndex, &maybe_undefined1,
7057 DONT_DO_SMI_CHECK);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007058 __ Subu(a2, a0, Operand(kHeapObjectTag));
7059 __ ldc1(f2, MemOperand(a2, HeapNumber::kValueOffset));
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00007060 __ Branch(&left);
7061 __ bind(&right_smi);
7062 __ SmiUntag(a2, a0); // Can't clobber a0 yet.
7063 FPURegister single_scratch = f6;
7064 __ mtc1(a2, single_scratch);
7065 __ cvt_d_w(f2, single_scratch);
7066
7067 __ bind(&left);
7068 __ JumpIfSmi(a1, &left_smi);
7069 __ CheckMap(a1, a2, Heap::kHeapNumberMapRootIndex, &maybe_undefined2,
7070 DONT_DO_SMI_CHECK);
7071 __ Subu(a2, a1, Operand(kHeapObjectTag));
7072 __ ldc1(f0, MemOperand(a2, HeapNumber::kValueOffset));
7073 __ Branch(&done);
7074 __ bind(&left_smi);
7075 __ SmiUntag(a2, a1); // Can't clobber a1 yet.
7076 single_scratch = f8;
7077 __ mtc1(a2, single_scratch);
7078 __ cvt_d_w(f0, single_scratch);
7079
7080 __ bind(&done);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007081
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00007082 // Return a result of -1, 0, or 1, or use CompareStub for NaNs.
7083 Label fpu_eq, fpu_lt;
7084 // Test if equal, and also handle the unordered/NaN case.
7085 __ BranchF(&fpu_eq, &unordered, eq, f0, f2);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007086
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00007087 // Test if less (unordered case is already handled).
7088 __ BranchF(&fpu_lt, NULL, lt, f0, f2);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007089
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00007090 // Otherwise it's greater, so just fall thru, and return.
ulan@chromium.org6ff65142012-03-21 09:52:17 +00007091 __ li(v0, Operand(GREATER));
7092 __ Ret();
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007093
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007094 __ bind(&fpu_eq);
ulan@chromium.org6ff65142012-03-21 09:52:17 +00007095 __ li(v0, Operand(EQUAL));
7096 __ Ret();
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007097
7098 __ bind(&fpu_lt);
ulan@chromium.org6ff65142012-03-21 09:52:17 +00007099 __ li(v0, Operand(LESS));
7100 __ Ret();
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007101 }
7102
ulan@chromium.org9a21ec42012-03-06 08:42:24 +00007103 __ bind(&unordered);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007104 __ bind(&generic_stub);
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00007105 ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC,
7106 CompareIC::GENERIC);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007107 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
7108
ulan@chromium.org9a21ec42012-03-06 08:42:24 +00007109 __ bind(&maybe_undefined1);
7110 if (Token::IsOrderedRelationalCompareOp(op_)) {
7111 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
7112 __ Branch(&miss, ne, a0, Operand(at));
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00007113 __ JumpIfSmi(a1, &unordered);
ulan@chromium.org9a21ec42012-03-06 08:42:24 +00007114 __ GetObjectType(a1, a2, a2);
7115 __ Branch(&maybe_undefined2, ne, a2, Operand(HEAP_NUMBER_TYPE));
7116 __ jmp(&unordered);
7117 }
7118
7119 __ bind(&maybe_undefined2);
7120 if (Token::IsOrderedRelationalCompareOp(op_)) {
7121 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
7122 __ Branch(&unordered, eq, a1, Operand(at));
7123 }
7124
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007125 __ bind(&miss);
7126 GenerateMiss(masm);
lrn@chromium.org7516f052011-03-30 08:52:27 +00007127}
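// ---------------------------------------------------------------------------
// Non-authoritative reference sketch of the FPU fast path above: both inputs
// are converted to doubles (untagging smis, loading heap-number values), NaNs
// fall through to the generic stub, and the result is LESS/EQUAL/GREATER.
// Helper names are invented for illustration.
//
//   static int CompareNumbers(Object* lhs, Object* rhs) {
//     double l = IsSmi(lhs) ? SmiValue(lhs) : HeapNumberValue(lhs);
//     double r = IsSmi(rhs) ? SmiValue(rhs) : HeapNumberValue(rhs);
//     if (l != l || r != r) return CallGenericStub();   // Unordered (NaN).
//     if (l == r) return EQUAL;
//     return l < r ? LESS : GREATER;
//   }
// ---------------------------------------------------------------------------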
7128
7129
karlklose@chromium.org83a47282011-05-11 11:54:09 +00007130void ICCompareStub::GenerateSymbols(MacroAssembler* masm) {
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00007131 ASSERT(state_ == CompareIC::SYMBOL);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007132 Label miss;
7133
7134 // Registers containing left and right operands respectively.
7135 Register left = a1;
7136 Register right = a0;
7137 Register tmp1 = a2;
7138 Register tmp2 = a3;
7139
7140 // Check that both operands are heap objects.
7141 __ JumpIfEitherSmi(left, right, &miss);
7142
7143 // Check that both operands are symbols.
7144 __ lw(tmp1, FieldMemOperand(left, HeapObject::kMapOffset));
7145 __ lw(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
7146 __ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
7147 __ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset));
7148 STATIC_ASSERT(kSymbolTag != 0);
7149 __ And(tmp1, tmp1, Operand(tmp2));
7150 __ And(tmp1, tmp1, kIsSymbolMask);
7151 __ Branch(&miss, eq, tmp1, Operand(zero_reg));
7152 // Make sure a0 is non-zero. At this point input operands are
7153 // guaranteed to be non-zero.
7154 ASSERT(right.is(a0));
7155 STATIC_ASSERT(EQUAL == 0);
7156 STATIC_ASSERT(kSmiTag == 0);
7157 __ mov(v0, right);
7158 // Symbols are compared by identity.
7159 __ Ret(ne, left, Operand(right));
7160 __ li(v0, Operand(Smi::FromInt(EQUAL)));
7161 __ Ret();
7162
7163 __ bind(&miss);
7164 GenerateMiss(masm);
7165}
karlklose@chromium.org83a47282011-05-11 11:54:09 +00007166
7167
7168void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00007169 ASSERT(state_ == CompareIC::STRING);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007170 Label miss;
7171
yangguo@chromium.org154ff992012-03-13 08:09:54 +00007172 bool equality = Token::IsEqualityOp(op_);
7173
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007174 // Registers containing left and right operands respectively.
7175 Register left = a1;
7176 Register right = a0;
7177 Register tmp1 = a2;
7178 Register tmp2 = a3;
7179 Register tmp3 = t0;
7180 Register tmp4 = t1;
7181 Register tmp5 = t2;
7182
7183 // Check that both operands are heap objects.
7184 __ JumpIfEitherSmi(left, right, &miss);
7185
7186 // Check that both operands are strings. This leaves the instance
7187 // types loaded in tmp1 and tmp2.
7188 __ lw(tmp1, FieldMemOperand(left, HeapObject::kMapOffset));
7189 __ lw(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
7190 __ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
7191 __ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset));
7192 STATIC_ASSERT(kNotStringTag != 0);
7193 __ Or(tmp3, tmp1, tmp2);
7194 __ And(tmp5, tmp3, Operand(kIsNotStringMask));
7195 __ Branch(&miss, ne, tmp5, Operand(zero_reg));
7196
7197 // Fast check for identical strings.
7198 Label left_ne_right;
7199 STATIC_ASSERT(EQUAL == 0);
7200 STATIC_ASSERT(kSmiTag == 0);
ulan@chromium.org6ff65142012-03-21 09:52:17 +00007201 __ Branch(&left_ne_right, ne, left, Operand(right));
7202 __ Ret(USE_DELAY_SLOT);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007203 __ mov(v0, zero_reg); // In the delay slot.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007204 __ bind(&left_ne_right);
7205
7206 // Handle not identical strings.
7207
7208 // Check that both strings are symbols. If they are, we're done
7209 // because we already know they are not identical.
yangguo@chromium.org154ff992012-03-13 08:09:54 +00007210 if (equality) {
7211 ASSERT(GetCondition() == eq);
7212 STATIC_ASSERT(kSymbolTag != 0);
7213 __ And(tmp3, tmp1, Operand(tmp2));
7214 __ And(tmp5, tmp3, Operand(kIsSymbolMask));
7215 Label is_symbol;
ulan@chromium.org6ff65142012-03-21 09:52:17 +00007216 __ Branch(&is_symbol, eq, tmp5, Operand(zero_reg));
yangguo@chromium.org154ff992012-03-13 08:09:54 +00007217 // Make sure a0 is non-zero. At this point input operands are
7218 // guaranteed to be non-zero.
7219 ASSERT(right.is(a0));
ulan@chromium.org6ff65142012-03-21 09:52:17 +00007220 __ Ret(USE_DELAY_SLOT);
7221 __ mov(v0, a0); // In the delay slot.
yangguo@chromium.org154ff992012-03-13 08:09:54 +00007222 __ bind(&is_symbol);
7223 }
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007224
7225 // Check that both strings are sequential ASCII.
7226 Label runtime;
yangguo@chromium.org154ff992012-03-13 08:09:54 +00007227 __ JumpIfBothInstanceTypesAreNotSequentialAscii(
7228 tmp1, tmp2, tmp3, tmp4, &runtime);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007229
7230 // Compare flat ASCII strings. Returns when done.
yangguo@chromium.org154ff992012-03-13 08:09:54 +00007231 if (equality) {
7232 StringCompareStub::GenerateFlatAsciiStringEquals(
7233 masm, left, right, tmp1, tmp2, tmp3);
7234 } else {
7235 StringCompareStub::GenerateCompareFlatAsciiStrings(
7236 masm, left, right, tmp1, tmp2, tmp3, tmp4);
7237 }
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007238
7239 // Handle more complex cases in runtime.
7240 __ bind(&runtime);
7241 __ Push(left, right);
yangguo@chromium.org154ff992012-03-13 08:09:54 +00007242 if (equality) {
7243 __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
7244 } else {
7245 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
7246 }
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007247
7248 __ bind(&miss);
7249 GenerateMiss(masm);
karlklose@chromium.org83a47282011-05-11 11:54:09 +00007250}
7251
7252
lrn@chromium.org7516f052011-03-30 08:52:27 +00007253void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00007254 ASSERT(state_ == CompareIC::OBJECT);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007255 Label miss;
7256 __ And(a2, a1, Operand(a0));
7257 __ JumpIfSmi(a2, &miss);
7258
7259 __ GetObjectType(a0, a2, a2);
7260 __ Branch(&miss, ne, a2, Operand(JS_OBJECT_TYPE));
7261 __ GetObjectType(a1, a2, a2);
7262 __ Branch(&miss, ne, a2, Operand(JS_OBJECT_TYPE));
7263
7264 ASSERT(GetCondition() == eq);
ulan@chromium.org6ff65142012-03-21 09:52:17 +00007265 __ Ret(USE_DELAY_SLOT);
7266 __ subu(v0, a0, a1);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007267
7268 __ bind(&miss);
7269 GenerateMiss(masm);
lrn@chromium.org7516f052011-03-30 08:52:27 +00007270}
7271
7272
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00007273void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) {
7274 Label miss;
7275 __ And(a2, a1, a0);
7276 __ JumpIfSmi(a2, &miss);
7277 __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
7278 __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset));
7279 __ Branch(&miss, ne, a2, Operand(known_map_));
7280 __ Branch(&miss, ne, a3, Operand(known_map_));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007281
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00007282 __ Ret(USE_DELAY_SLOT);
7283 __ subu(v0, a0, a1);
7284
7285 __ bind(&miss);
7286 GenerateMiss(masm);
7287}
7288
7289void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00007290 {
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00007291 // Call the runtime system in a fresh internal frame.
7292 ExternalReference miss =
7293 ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00007294 FrameScope scope(masm, StackFrame::INTERNAL);
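  // The operands are pushed twice: the first copies are preserved across the
  // call and restored by the Pop below; the second copies (plus the op smi
  // stored via the delay slot) are the arguments to the miss handler.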
7295 __ Push(a1, a0);
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00007296 __ push(ra);
7297 __ Push(a1, a0);
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00007298 __ li(t0, Operand(Smi::FromInt(op_)));
ulan@chromium.org6ff65142012-03-21 09:52:17 +00007299 __ addiu(sp, sp, -kPointerSize);
7300 __ CallExternalReference(miss, 3, USE_DELAY_SLOT);
7301 __ sw(t0, MemOperand(sp)); // In the delay slot.
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00007302 // Compute the entry point of the rewritten stub.
7303 __ Addu(a2, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
7304 // Restore registers.
7305 __ Pop(a1, a0, ra);
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00007306 }
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007307 __ Jump(a2);
7308}
7309
erik.corry@gmail.comd6076d92011-06-06 09:39:18 +00007310
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007311void DirectCEntryStub::Generate(MacroAssembler* masm) {
7312 // No need to pop or drop anything, LeaveExitFrame will restore the old
7313 // stack, thus dropping the allocated space for the return value.
7314 // The saved ra is after the reserved stack space for the 4 args.
7315 __ lw(t9, MemOperand(sp, kCArgsSlotsSize));
7316
erik.corry@gmail.com394dbcf2011-10-27 07:38:48 +00007317 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007318 // In case of an error the return address may point to a memory area
7319 // filled with kZapValue by the GC.
7320 // Dereference the address and check for this.
7321 __ lw(t0, MemOperand(t9));
7322 __ Assert(ne, "Received invalid return address.", t0,
7323 Operand(reinterpret_cast<uint32_t>(kZapValue)));
7324 }
7325 __ Jump(t9);
lrn@chromium.org7516f052011-03-30 08:52:27 +00007326}
7327
7328
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007329void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
7330 ExternalReference function) {
7331 __ li(t9, Operand(function));
7332 this->GenerateCall(masm, t9);
7333}
7334
erik.corry@gmail.comd6076d92011-06-06 09:39:18 +00007335
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007336void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
7337 Register target) {
7338 __ Move(t9, target);
7339 __ AssertStackIsAligned();
7340 // Allocate space for arg slots.
7341 __ Subu(sp, sp, kCArgsSlotsSize);
7342
7343 // Block the trampoline pool through the whole function to make sure the
7344 // number of generated instructions is constant.
7345 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm);
7346
7347 // We need the current 'pc' value, which cannot be read directly on MIPS.
7348 Label find_ra;
7349 masm->bal(&find_ra); // ra = pc + 8.
7350 masm->nop(); // Branch delay slot nop.
7351 masm->bind(&find_ra);
7352
7353 const int kNumInstructionsToJump = 6;
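  // The six instructions are the addiu and sw below, the two-instruction li
  // (CONSTANT_SIZE) and the jr emitted by Jump() together with its delay-slot
  // nop; the ASSERT_EQ at the end of the function checks this count.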
7354 masm->addiu(ra, ra, kNumInstructionsToJump * kPointerSize);
7355 // Push return address (accessible to GC through exit frame pc).
7356 // This spot for ra was reserved in EnterExitFrame.
7357 masm->sw(ra, MemOperand(sp, kCArgsSlotsSize));
danno@chromium.org88aa0582012-03-23 15:11:57 +00007358 masm->li(ra,
7359 Operand(reinterpret_cast<intptr_t>(GetCode().location()),
7360 RelocInfo::CODE_TARGET),
7361 CONSTANT_SIZE);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007362 // Call the function.
7363 masm->Jump(t9);
7364 // Make sure the stored 'ra' points to this position.
7365 ASSERT_EQ(kNumInstructionsToJump, masm->InstructionsGeneratedSince(&find_ra));
7366}
7367
7368
erik.corry@gmail.com394dbcf2011-10-27 07:38:48 +00007369void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
7370 Label* miss,
7371 Label* done,
7372 Register receiver,
7373 Register properties,
7374 Handle<String> name,
7375 Register scratch0) {
7376 // If none of the slots probed for the hash value (slots 1 to kProbes - 1)
7377 // holds the name, and the kProbes-th slot is unused (its name is the
7378 // undefined value), the hash table is guaranteed not to contain the
7379 // property. This holds even if some slots represent deleted properties
ulan@chromium.org967e2702012-02-28 09:49:15 +00007380 // (their names are the hole value).
erik.corry@gmail.com394dbcf2011-10-27 07:38:48 +00007381 for (int i = 0; i < kInlinedProbes; i++) {
7382 // scratch0 points to properties hash.
7383 // Compute the masked index: (hash + i + i * i) & mask.
7384 Register index = scratch0;
7385 // Capacity is smi 2^n.
7386 __ lw(index, FieldMemOperand(properties, kCapacityOffset));
7387 __ Subu(index, index, Operand(1));
7388 __ And(index, index, Operand(
7389 Smi::FromInt(name->Hash() + StringDictionary::GetProbeOffset(i))));
7390
7391 // Scale the index by multiplying by the entry size.
7392 ASSERT(StringDictionary::kEntrySize == 3);
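  // index *= 3 (the entry size), computed as index + (index << 1).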
7393 __ sll(at, index, 1);
7394 __ Addu(index, index, at);
7395
7396 Register entity_name = scratch0;
7397 // Having undefined at this place means the name is not contained.
7398 ASSERT_EQ(kSmiTagSize, 1);
7399 Register tmp = properties;
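  // index is a smi and hence already scaled by a factor of 2; one more left
  // shift yields the byte offset index * kPointerSize.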
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007400 __ sll(scratch0, index, 1);
7401 __ Addu(tmp, properties, scratch0);
7402 __ lw(entity_name, FieldMemOperand(tmp, kElementsStartOffset));
7403
7404 ASSERT(!tmp.is(entity_name));
7405 __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex);
7406 __ Branch(done, eq, entity_name, Operand(tmp));
7407
7408 if (i != kInlinedProbes - 1) {
ulan@chromium.org967e2702012-02-28 09:49:15 +00007409 // Load the hole ready for use below:
7410 __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex);
7411
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007412 // Stop if found the property.
7413 __ Branch(miss, eq, entity_name, Operand(Handle<String>(name)));
7414
ulan@chromium.org967e2702012-02-28 09:49:15 +00007415 Label the_hole;
7416 __ Branch(&the_hole, eq, entity_name, Operand(tmp));
7417
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007418 // Check if the entry name is not a symbol.
7419 __ lw(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset));
7420 __ lbu(entity_name,
7421 FieldMemOperand(entity_name, Map::kInstanceTypeOffset));
7422 __ And(scratch0, entity_name, Operand(kIsSymbolMask));
7423 __ Branch(miss, eq, scratch0, Operand(zero_reg));
7424
ulan@chromium.org967e2702012-02-28 09:49:15 +00007425 __ bind(&the_hole);
7426
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007427 // Restore the properties.
7428 __ lw(properties,
7429 FieldMemOperand(receiver, JSObject::kPropertiesOffset));
7430 }
7431 }
7432
7433 const int spill_mask =
7434 (ra.bit() | t2.bit() | t1.bit() | t0.bit() | a3.bit() |
erik.corry@gmail.com394dbcf2011-10-27 07:38:48 +00007435 a2.bit() | a1.bit() | a0.bit() | v0.bit());
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007436
7437 __ MultiPush(spill_mask);
7438 __ lw(a0, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
7439 __ li(a1, Operand(Handle<String>(name)));
7440 StringDictionaryLookupStub stub(NEGATIVE_LOOKUP);
jkummerow@chromium.orgc3b37122011-11-07 10:14:12 +00007441 __ CallStub(&stub);
erik.corry@gmail.com394dbcf2011-10-27 07:38:48 +00007442 __ mov(at, v0);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007443 __ MultiPop(spill_mask);
7444
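  // The stub's result is in v0 (copied to at above): non-zero means the name
  // was found, which for this negative lookup is a miss.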
erik.corry@gmail.com394dbcf2011-10-27 07:38:48 +00007445 __ Branch(done, eq, at, Operand(zero_reg));
7446 __ Branch(miss, ne, at, Operand(zero_reg));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007447}
7448
7449
7450// Probe the string dictionary in the |elements| register. Jump to the
7451// |done| label if a property with the given name is found. Jump to
7452// the |miss| label otherwise.
7453// If lookup was successful |scratch2| will be equal to elements + 4 * index.
7454void StringDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
7455 Label* miss,
7456 Label* done,
7457 Register elements,
7458 Register name,
7459 Register scratch1,
7460 Register scratch2) {
jkummerow@chromium.orgc3b37122011-11-07 10:14:12 +00007461 ASSERT(!elements.is(scratch1));
7462 ASSERT(!elements.is(scratch2));
7463 ASSERT(!name.is(scratch1));
7464 ASSERT(!name.is(scratch2));
7465
svenpanne@chromium.orgc859c4f2012-10-15 11:51:39 +00007466 __ AssertString(name);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007467
7468 // Compute the capacity mask.
7469 __ lw(scratch1, FieldMemOperand(elements, kCapacityOffset));
7470 __ sra(scratch1, scratch1, kSmiTagSize); // convert smi to int
7471 __ Subu(scratch1, scratch1, Operand(1));
7472
7473 // Generate an unrolled loop that performs a few probes before
7474 // giving up. Measurements done on Gmail indicate that 2 probes
7475 // cover ~93% of loads from dictionaries.
7476 for (int i = 0; i < kInlinedProbes; i++) {
7477 // Compute the masked index: (hash + i + i * i) & mask.
7478 __ lw(scratch2, FieldMemOperand(name, String::kHashFieldOffset));
7479 if (i > 0) {
7480 // Add the probe offset (i + i * i) left shifted to avoid right shifting
7481 // the hash in a separate instruction. The combined value hash + i + i * i
7482 // is right-shifted by the 'srl' below and then masked by the 'And'.
7483 ASSERT(StringDictionary::GetProbeOffset(i) <
7484 1 << (32 - String::kHashFieldOffset));
7485 __ Addu(scratch2, scratch2, Operand(
7486 StringDictionary::GetProbeOffset(i) << String::kHashShift));
7487 }
7488 __ srl(scratch2, scratch2, String::kHashShift);
7489 __ And(scratch2, scratch1, scratch2);
7490
7491 // Scale the index by multiplying by the element size.
7492 ASSERT(StringDictionary::kEntrySize == 3);
7493 // scratch2 = scratch2 * 3.
7494
erik.corry@gmail.com394dbcf2011-10-27 07:38:48 +00007495 __ sll(at, scratch2, 1);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007496 __ Addu(scratch2, scratch2, at);
7497
7498 // Check if the key is identical to the name.
7499 __ sll(at, scratch2, 2);
7500 __ Addu(scratch2, elements, at);
7501 __ lw(at, FieldMemOperand(scratch2, kElementsStartOffset));
7502 __ Branch(done, eq, name, Operand(at));
7503 }
7504
7505 const int spill_mask =
7506 (ra.bit() | t2.bit() | t1.bit() | t0.bit() |
erik.corry@gmail.com394dbcf2011-10-27 07:38:48 +00007507 a3.bit() | a2.bit() | a1.bit() | a0.bit() | v0.bit()) &
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007508 ~(scratch1.bit() | scratch2.bit());
7509
7510 __ MultiPush(spill_mask);
erik.corry@gmail.com394dbcf2011-10-27 07:38:48 +00007511 if (name.is(a0)) {
7512 ASSERT(!elements.is(a1));
7513 __ Move(a1, name);
7514 __ Move(a0, elements);
7515 } else {
7516 __ Move(a0, elements);
7517 __ Move(a1, name);
7518 }
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007519 StringDictionaryLookupStub stub(POSITIVE_LOOKUP);
7520 __ CallStub(&stub);
7521 __ mov(scratch2, a2);
erik.corry@gmail.com394dbcf2011-10-27 07:38:48 +00007522 __ mov(at, v0);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007523 __ MultiPop(spill_mask);
7524
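  // The stub's result is in v0 (copied to at above): non-zero means the name
  // was found, and scratch2 then holds elements + 4 * index.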
erik.corry@gmail.com394dbcf2011-10-27 07:38:48 +00007525 __ Branch(done, ne, at, Operand(zero_reg));
7526 __ Branch(miss, eq, at, Operand(zero_reg));
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007527}
7528
7529
7530void StringDictionaryLookupStub::Generate(MacroAssembler* masm) {
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00007531 // This stub overrides SometimesSetsUpAFrame() to return false. That means
7532 // we cannot call anything that could cause a GC from this stub.
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007533 // Registers:
7534 //  dictionary: StringDictionary to probe (a0).
7535 //  key: the name being looked up (a1).
7536 //  index: will hold the index of the entry if the lookup
7537 //         succeeds (a2); might alias with result.
7538 // Returns:
7539 //  result: zero if the lookup failed, non-zero
7540 //          otherwise (v0).
7541
7542 Register result = v0;
7543 Register dictionary = a0;
7544 Register key = a1;
7545 Register index = a2;
7546 Register mask = a3;
7547 Register hash = t0;
7548 Register undefined = t1;
7549 Register entry_key = t2;
7550
7551 Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
7552
7553 __ lw(mask, FieldMemOperand(dictionary, kCapacityOffset));
7554 __ sra(mask, mask, kSmiTagSize);
7555 __ Subu(mask, mask, Operand(1));
7556
7557 __ lw(hash, FieldMemOperand(key, String::kHashFieldOffset));
7558
7559 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
7560
7561 for (int i = kInlinedProbes; i < kTotalProbes; i++) {
7562 // Compute the masked index: (hash + i + i * i) & mask.
7563 // Capacity is smi 2^n.
7564 if (i > 0) {
7565 // Add the probe offset (i + i * i) left shifted to avoid right shifting
7566 // the hash in a separate instruction. The combined value hash + i + i * i
7567 // is right-shifted by the 'srl' below and then masked by the 'And'.
7568 ASSERT(StringDictionary::GetProbeOffset(i) <
7569 1 << (32 - String::kHashFieldOffset));
7570 __ Addu(index, hash, Operand(
7571 StringDictionary::GetProbeOffset(i) << String::kHashShift));
7572 } else {
7573 __ mov(index, hash);
7574 }
7575 __ srl(index, index, String::kHashShift);
7576 __ And(index, mask, index);
7577
7578 // Scale the index by multiplying by the entry size.
7579 ASSERT(StringDictionary::kEntrySize == 3);
7580 // index *= 3.
7581 __ mov(at, index);
7582 __ sll(index, index, 1);
7583 __ Addu(index, index, at);
7584
7585
7586 ASSERT_EQ(kSmiTagSize, 1);
7587 __ sll(index, index, 2);
7588 __ Addu(index, index, dictionary);
7589 __ lw(entry_key, FieldMemOperand(index, kElementsStartOffset));
7590
7591 // Having undefined at this place means the name is not contained.
7592 __ Branch(&not_in_dictionary, eq, entry_key, Operand(undefined));
7593
7594 // Stop if found the property.
7595 __ Branch(&in_dictionary, eq, entry_key, Operand(key));
7596
7597 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) {
7598 // Check if the entry name is not a symbol.
7599 __ lw(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset));
7600 __ lbu(entry_key,
7601 FieldMemOperand(entry_key, Map::kInstanceTypeOffset));
7602 __ And(result, entry_key, Operand(kIsSymbolMask));
7603 __ Branch(&maybe_in_dictionary, eq, result, Operand(zero_reg));
7604 }
7605 }
7606
7607 __ bind(&maybe_in_dictionary);
7608 // If we are doing negative lookup then probing failure should be
7609 // treated as a lookup success. For positive lookup probing failure
7610 // should be treated as lookup failure.
7611 if (mode_ == POSITIVE_LOOKUP) {
ulan@chromium.org6ff65142012-03-21 09:52:17 +00007612 __ Ret(USE_DELAY_SLOT);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007613 __ mov(result, zero_reg);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007614 }
7615
7616 __ bind(&in_dictionary);
ulan@chromium.org6ff65142012-03-21 09:52:17 +00007617 __ Ret(USE_DELAY_SLOT);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007618 __ li(result, 1);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007619
7620 __ bind(&not_in_dictionary);
ulan@chromium.org6ff65142012-03-21 09:52:17 +00007621 __ Ret(USE_DELAY_SLOT);
vegorov@chromium.org7304bca2011-05-16 12:14:13 +00007622 __ mov(result, zero_reg);
lrn@chromium.org7516f052011-03-30 08:52:27 +00007623}
7624
7625
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00007626struct AheadOfTimeWriteBarrierStubList {
7627 Register object, value, address;
7628 RememberedSetAction action;
7629};
7630
fschneider@chromium.org7d10be52012-04-10 12:30:14 +00007631#define REG(Name) { kRegister_ ## Name ## _Code }
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00007632
fschneider@chromium.org7d10be52012-04-10 12:30:14 +00007633static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00007634 // Used in RegExpExecStub.
fschneider@chromium.org7d10be52012-04-10 12:30:14 +00007635 { REG(s2), REG(s0), REG(t3), EMIT_REMEMBERED_SET },
7636 { REG(s2), REG(a2), REG(t3), EMIT_REMEMBERED_SET },
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00007637 // Used in CompileArrayPushCall.
7638 // Also used in StoreIC::GenerateNormal via GenerateDictionaryStore.
7639 // Also used in KeyedStoreIC::GenerateGeneric.
fschneider@chromium.org7d10be52012-04-10 12:30:14 +00007640 { REG(a3), REG(t0), REG(t1), EMIT_REMEMBERED_SET },
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00007641 // Used in CompileStoreGlobal.
fschneider@chromium.org7d10be52012-04-10 12:30:14 +00007642 { REG(t0), REG(a1), REG(a2), OMIT_REMEMBERED_SET },
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00007643 // Used in StoreStubCompiler::CompileStoreField via GenerateStoreField.
fschneider@chromium.org7d10be52012-04-10 12:30:14 +00007644 { REG(a1), REG(a2), REG(a3), EMIT_REMEMBERED_SET },
7645 { REG(a3), REG(a2), REG(a1), EMIT_REMEMBERED_SET },
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00007646 // Used in KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField.
fschneider@chromium.org7d10be52012-04-10 12:30:14 +00007647 { REG(a2), REG(a1), REG(a3), EMIT_REMEMBERED_SET },
7648 { REG(a3), REG(a1), REG(a2), EMIT_REMEMBERED_SET },
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00007649 // KeyedStoreStubCompiler::GenerateStoreFastElement.
fschneider@chromium.org7d10be52012-04-10 12:30:14 +00007650 { REG(a3), REG(a2), REG(t0), EMIT_REMEMBERED_SET },
7651 { REG(a2), REG(a3), REG(t0), EMIT_REMEMBERED_SET },
svenpanne@chromium.org830d30c2012-05-29 13:20:14 +00007652 // ElementsTransitionGenerator::GenerateMapChangeElementTransition
7653 // and ElementsTransitionGenerator::GenerateSmiToDouble
erik.corry@gmail.com394dbcf2011-10-27 07:38:48 +00007654 // and ElementsTransitionGenerator::GenerateDoubleToObject
fschneider@chromium.org7d10be52012-04-10 12:30:14 +00007655 { REG(a2), REG(a3), REG(t5), EMIT_REMEMBERED_SET },
7656 { REG(a2), REG(a3), REG(t5), OMIT_REMEMBERED_SET },
erik.corry@gmail.com394dbcf2011-10-27 07:38:48 +00007657 // ElementsTransitionGenerator::GenerateDoubleToObject
fschneider@chromium.org7d10be52012-04-10 12:30:14 +00007658 { REG(t2), REG(a2), REG(a0), EMIT_REMEMBERED_SET },
7659 { REG(a2), REG(t2), REG(t5), EMIT_REMEMBERED_SET },
erikcorry0ad885c2011-11-21 13:51:57 +00007660 // StoreArrayLiteralElementStub::Generate
fschneider@chromium.org7d10be52012-04-10 12:30:14 +00007661 { REG(t1), REG(a0), REG(t2), EMIT_REMEMBERED_SET },
yangguo@chromium.org5a11aaf2012-06-20 11:29:00 +00007662 // FastNewClosureStub::Generate
7663 { REG(a2), REG(t0), REG(a1), EMIT_REMEMBERED_SET },
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00007664 // Null termination.
fschneider@chromium.org7d10be52012-04-10 12:30:14 +00007665 { REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00007666};
7667
fschneider@chromium.org7d10be52012-04-10 12:30:14 +00007668#undef REG
7669
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00007670
7671bool RecordWriteStub::IsPregenerated() {
fschneider@chromium.org7d10be52012-04-10 12:30:14 +00007672 for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00007673 !entry->object.is(no_reg);
7674 entry++) {
7675 if (object_.is(entry->object) &&
7676 value_.is(entry->value) &&
7677 address_.is(entry->address) &&
7678 remembered_set_action_ == entry->action &&
7679 save_fp_regs_mode_ == kDontSaveFPRegs) {
7680 return true;
7681 }
7682 }
7683 return false;
7684}
7685
7686
7687bool StoreBufferOverflowStub::IsPregenerated() {
7688 return save_doubles_ == kDontSaveFPRegs || ISOLATE->fp_stubs_generated();
7689}
7690
7691
7692void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime() {
7693 StoreBufferOverflowStub stub1(kDontSaveFPRegs);
7694 stub1.GetCode()->set_is_pregenerated(true);
7695}
7696
7697
7698void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() {
fschneider@chromium.org7d10be52012-04-10 12:30:14 +00007699 for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00007700 !entry->object.is(no_reg);
7701 entry++) {
7702 RecordWriteStub stub(entry->object,
7703 entry->value,
7704 entry->address,
7705 entry->action,
7706 kDontSaveFPRegs);
7707 stub.GetCode()->set_is_pregenerated(true);
7708 }
7709}
7710
7711
verwaest@chromium.org33e09c82012-10-10 17:07:22 +00007712bool CodeStub::CanUseFPRegisters() {
7713 return CpuFeatures::IsSupported(FPU);
7714}
7715
7716
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00007717 // Takes the input in 3 registers: address_, value_ and object_. A pointer to
7718 // the value has just been written into the object; now this stub makes sure
7719// we keep the GC informed. The word in the object where the value has been
7720// written is in the address register.
7721void RecordWriteStub::Generate(MacroAssembler* masm) {
7722 Label skip_to_incremental_noncompacting;
7723 Label skip_to_incremental_compacting;
7724
7725 // The first two branch+nop instructions are generated with labels so as to
7726 // get the offset fixed up correctly by the bind(Label*) call. We patch it
7727 // back and forth between a "bne zero_reg, zero_reg, ..." (a nop in this
7728 // position) and the "beq zero_reg, zero_reg, ..." when we start and stop
7729 // incremental heap marking.
7730 // See RecordWriteStub::Patch for details.
7731 __ beq(zero_reg, zero_reg, &skip_to_incremental_noncompacting);
7732 __ nop();
7733 __ beq(zero_reg, zero_reg, &skip_to_incremental_compacting);
7734 __ nop();
7735
7736 if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
7737 __ RememberedSetHelper(object_,
7738 address_,
7739 value_,
7740 save_fp_regs_mode_,
7741 MacroAssembler::kReturnAtEnd);
7742 }
7743 __ Ret();
7744
7745 __ bind(&skip_to_incremental_noncompacting);
7746 GenerateIncremental(masm, INCREMENTAL);
7747
7748 __ bind(&skip_to_incremental_compacting);
7749 GenerateIncremental(masm, INCREMENTAL_COMPACTION);
7750
7751 // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
7752 // Will be checked in IncrementalMarking::ActivateGeneratedStub.
7753
7754 PatchBranchIntoNop(masm, 0);
7755 PatchBranchIntoNop(masm, 2 * Assembler::kInstrSize);
7756}
7757
7758
7759void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
7760 regs_.Save(masm);
7761
7762 if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
7763 Label dont_need_remembered_set;
7764
7765 __ lw(regs_.scratch0(), MemOperand(regs_.address(), 0));
7766 __ JumpIfNotInNewSpace(regs_.scratch0(), // Value.
7767 regs_.scratch0(),
7768 &dont_need_remembered_set);
7769
7770 __ CheckPageFlag(regs_.object(),
7771 regs_.scratch0(),
7772 1 << MemoryChunk::SCAN_ON_SCAVENGE,
7773 ne,
7774 &dont_need_remembered_set);
7775
7776 // First notify the incremental marker if necessary, then update the
7777 // remembered set.
7778 CheckNeedsToInformIncrementalMarker(
7779 masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
7780 InformIncrementalMarker(masm, mode);
7781 regs_.Restore(masm);
7782 __ RememberedSetHelper(object_,
7783 address_,
7784 value_,
7785 save_fp_regs_mode_,
7786 MacroAssembler::kReturnAtEnd);
7787
7788 __ bind(&dont_need_remembered_set);
7789 }
7790
7791 CheckNeedsToInformIncrementalMarker(
7792 masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
7793 InformIncrementalMarker(masm, mode);
7794 regs_.Restore(masm);
7795 __ Ret();
7796}
7797
7798
7799void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode mode) {
7800 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
7801 int argument_count = 3;
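  // The three arguments are the object, the slot address and the isolate.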
7802 __ PrepareCallCFunction(argument_count, regs_.scratch0());
7803 Register address =
7804 a0.is(regs_.address()) ? regs_.scratch0() : regs_.address();
7805 ASSERT(!address.is(regs_.object()));
7806 ASSERT(!address.is(a0));
7807 __ Move(address, regs_.address());
7808 __ Move(a0, regs_.object());
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00007809 __ Move(a1, address);
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00007810 __ li(a2, Operand(ExternalReference::isolate_address()));
7811
7812 AllowExternalCallThatCantCauseGC scope(masm);
7813 if (mode == INCREMENTAL_COMPACTION) {
7814 __ CallCFunction(
7815 ExternalReference::incremental_evacuation_record_write_function(
7816 masm->isolate()),
7817 argument_count);
7818 } else {
7819 ASSERT(mode == INCREMENTAL);
7820 __ CallCFunction(
7821 ExternalReference::incremental_marking_record_write_function(
7822 masm->isolate()),
7823 argument_count);
7824 }
7825 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
7826}
7827
7828
7829void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
7830 MacroAssembler* masm,
7831 OnNoNeedToInformIncrementalMarker on_no_need,
7832 Mode mode) {
7833 Label on_black;
7834 Label need_incremental;
7835 Label need_incremental_pop_scratch;
7836
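  // Decrement the page's write barrier counter; once it goes negative, branch
  // to need_incremental so the incremental marker gets informed.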
rossberg@chromium.org89e18f52012-10-22 13:09:53 +00007837 __ And(regs_.scratch0(), regs_.object(), Operand(~Page::kPageAlignmentMask));
7838 __ lw(regs_.scratch1(),
7839 MemOperand(regs_.scratch0(),
7840 MemoryChunk::kWriteBarrierCounterOffset));
7841 __ Subu(regs_.scratch1(), regs_.scratch1(), Operand(1));
7842 __ sw(regs_.scratch1(),
7843 MemOperand(regs_.scratch0(),
7844 MemoryChunk::kWriteBarrierCounterOffset));
7845 __ Branch(&need_incremental, lt, regs_.scratch1(), Operand(zero_reg));
7846
rossberg@chromium.orgb4b2aa62011-10-13 09:49:59 +00007847 // Let's look at the color of the object: If it is not black we don't have
7848 // to inform the incremental marker.
7849 __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black);
7850
7851 regs_.Restore(masm);
7852 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
7853 __ RememberedSetHelper(object_,
7854 address_,
7855 value_,
7856 save_fp_regs_mode_,
7857 MacroAssembler::kReturnAtEnd);
7858 } else {
7859 __ Ret();
7860 }
7861
7862 __ bind(&on_black);
7863
7864 // Get the value from the slot.
7865 __ lw(regs_.scratch0(), MemOperand(regs_.address(), 0));
7866
7867 if (mode == INCREMENTAL_COMPACTION) {
7868 Label ensure_not_white;
7869
7870 __ CheckPageFlag(regs_.scratch0(), // Contains value.
7871 regs_.scratch1(), // Scratch.
7872 MemoryChunk::kEvacuationCandidateMask,
7873 eq,
7874 &ensure_not_white);
7875
7876 __ CheckPageFlag(regs_.object(),
7877 regs_.scratch1(), // Scratch.
7878 MemoryChunk::kSkipEvacuationSlotsRecordingMask,
7879 eq,
7880 &need_incremental);
7881
7882 __ bind(&ensure_not_white);
7883 }
7884
7885 // We need extra registers for this, so we push the object and the address
7886 // register temporarily.
7887 __ Push(regs_.object(), regs_.address());
7888 __ EnsureNotWhite(regs_.scratch0(), // The value.
7889 regs_.scratch1(), // Scratch.
7890 regs_.object(), // Scratch.
7891 regs_.address(), // Scratch.
7892 &need_incremental_pop_scratch);
7893 __ Pop(regs_.object(), regs_.address());
7894
7895 regs_.Restore(masm);
7896 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
7897 __ RememberedSetHelper(object_,
7898 address_,
7899 value_,
7900 save_fp_regs_mode_,
7901 MacroAssembler::kReturnAtEnd);
7902 } else {
7903 __ Ret();
7904 }
7905
7906 __ bind(&need_incremental_pop_scratch);
7907 __ Pop(regs_.object(), regs_.address());
7908
7909 __ bind(&need_incremental);
7910
7911 // Fall through when we need to inform the incremental marker.
7912}
7913
7914
erikcorry0ad885c2011-11-21 13:51:57 +00007915void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
7916 // ----------- S t a t e -------------
7917 // -- a0 : element value to store
7918 // -- a1 : array literal
7919 // -- a2 : map of array literal
7920 // -- a3 : element index as smi
7921 // -- t0 : array literal index in function as smi
7922 // -----------------------------------
7923
7924 Label element_done;
7925 Label double_elements;
7926 Label smi_element;
7927 Label slow_elements;
7928 Label fast_elements;
7929
7930 __ CheckFastElements(a2, t1, &double_elements);
svenpanne@chromium.org830d30c2012-05-29 13:20:14 +00007931 // Check for FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS elements
erikcorry0ad885c2011-11-21 13:51:57 +00007932 __ JumpIfSmi(a0, &smi_element);
svenpanne@chromium.org830d30c2012-05-29 13:20:14 +00007933 __ CheckFastSmiElements(a2, t1, &fast_elements);
erikcorry0ad885c2011-11-21 13:51:57 +00007934
7935 // Storing into the array literal requires an elements transition. Call
7936 // into the runtime.
7937 __ bind(&slow_elements);
7939 __ Push(a1, a3, a0);
7940 __ lw(t1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
7941 __ lw(t1, FieldMemOperand(t1, JSFunction::kLiteralsOffset));
7942 __ Push(t1, t0);
7943 __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
7944
svenpanne@chromium.org830d30c2012-05-29 13:20:14 +00007945 // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
erikcorry0ad885c2011-11-21 13:51:57 +00007946 __ bind(&fast_elements);
7947 __ lw(t1, FieldMemOperand(a1, JSObject::kElementsOffset));
7948 __ sll(t2, a3, kPointerSizeLog2 - kSmiTagSize);
7949 __ Addu(t2, t1, t2);
7950 __ Addu(t2, t2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
7951 __ sw(a0, MemOperand(t2, 0));
7952 // Update the write barrier for the array store.
7953 __ RecordWrite(t1, t2, a0, kRAHasNotBeenSaved, kDontSaveFPRegs,
7954 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00007955 __ Ret(USE_DELAY_SLOT);
7956 __ mov(v0, a0);
erikcorry0ad885c2011-11-21 13:51:57 +00007957
svenpanne@chromium.org830d30c2012-05-29 13:20:14 +00007958 // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS,
7959 // and value is Smi.
erikcorry0ad885c2011-11-21 13:51:57 +00007960 __ bind(&smi_element);
7961 __ lw(t1, FieldMemOperand(a1, JSObject::kElementsOffset));
7962 __ sll(t2, a3, kPointerSizeLog2 - kSmiTagSize);
7963 __ Addu(t2, t1, t2);
7964 __ sw(a0, FieldMemOperand(t2, FixedArray::kHeaderSize));
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00007965 __ Ret(USE_DELAY_SLOT);
7966 __ mov(v0, a0);
erikcorry0ad885c2011-11-21 13:51:57 +00007967
svenpanne@chromium.org830d30c2012-05-29 13:20:14 +00007968 // Array literal has ElementsKind of FAST_*_DOUBLE_ELEMENTS.
erikcorry0ad885c2011-11-21 13:51:57 +00007969 __ bind(&double_elements);
7970 __ lw(t1, FieldMemOperand(a1, JSObject::kElementsOffset));
ulan@chromium.org8e8d8822012-11-23 14:36:46 +00007971 __ StoreNumberToDoubleElements(a0, a3,
rossberg@chromium.org89e18f52012-10-22 13:09:53 +00007972 // Overwrites all regs after this.
7973 t1, t2, t3, t5, a2,
erikcorry0ad885c2011-11-21 13:51:57 +00007974 &slow_elements);
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00007975 __ Ret(USE_DELAY_SLOT);
7976 __ mov(v0, a0);
erikcorry0ad885c2011-11-21 13:51:57 +00007977}
7978
7979
mstarzinger@chromium.orge3b8d0f2013-02-01 09:06:41 +00007980void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
7981 ASSERT(!Serializer::enabled());
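  // Only request FP register saving when the CPU actually supports the FPU.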
7982 bool save_fp_regs = CpuFeatures::IsSupported(FPU);
7983 CEntryStub ces(1, save_fp_regs ? kSaveFPRegs : kDontSaveFPRegs);
7984 __ Call(ces.GetCode(), RelocInfo::CODE_TARGET);
7985 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
7986 __ Ret();
7987}
7988
7989
danno@chromium.org129d3982012-07-25 15:01:47 +00007990void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
7991 if (entry_hook_ != NULL) {
7992 ProfileEntryHookStub stub;
7993 __ push(ra);
7994 __ CallStub(&stub);
7995 __ pop(ra);
7996 }
7997}
7998
7999
8000void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
8001 // The entry hook is a "push ra" instruction, followed by a call.
8002 // Note: on MIPS a "push" takes 2 instructions.
8003 const int32_t kReturnAddressDistanceFromFunctionStart =
8004 Assembler::kCallTargetAddressOffset + (2 * Assembler::kInstrSize);
8005
8006 // Save live volatile registers.
8007 __ Push(ra, t1, a1);
8008 const int32_t kNumSavedRegs = 3;
8009
8010 // Compute the function's address for the first argument.
8011 __ Subu(a0, ra, Operand(kReturnAddressDistanceFromFunctionStart));
8012
8013 // The caller's return address is above the saved temporaries.
8014 // Grab that for the second argument to the hook.
8015 __ Addu(a1, sp, Operand(kNumSavedRegs * kPointerSize));
8016
8017 // Align the stack if necessary.
8018 int frame_alignment = masm->ActivationFrameAlignment();
8019 if (frame_alignment > kPointerSize) {
8020 __ mov(t1, sp);
8021 ASSERT(IsPowerOf2(frame_alignment));
8022 __ And(sp, sp, Operand(-frame_alignment));
8023 }
8024
8025#if defined(V8_HOST_ARCH_MIPS)
8026 __ li(at, Operand(reinterpret_cast<int32_t>(&entry_hook_)));
8027 __ lw(at, MemOperand(at));
8028#else
8029 // Under the simulator we need to indirect the entry hook through a
8030 // trampoline function at a known address.
8031 Address trampoline_address = reinterpret_cast<Address>(
8032 reinterpret_cast<intptr_t>(EntryHookTrampoline));
8033 ApiFunction dispatcher(trampoline_address);
8034 __ li(at, Operand(ExternalReference(&dispatcher,
8035 ExternalReference::BUILTIN_CALL,
8036 masm->isolate())));
8037#endif
8038 __ Call(at);
8039
8040 // Restore the stack pointer if needed.
8041 if (frame_alignment > kPointerSize) {
8042 __ mov(sp, t1);
8043 }
8044
8045 __ Pop(ra, t1, a1);
8046 __ Ret();
8047}
8048
8049
lrn@chromium.org7516f052011-03-30 08:52:27 +00008050#undef __
8051
8052} } // namespace v8::internal
8053
8054#endif // V8_TARGET_ARCH_MIPS