blob: 85e74b85667ec4aaaf4e53406665817638a81216 [file] [log] [blame]
Steve Block1e0659c2011-05-24 12:43:12 +01001// Copyright 2011 the V8 project authors. All rights reserved.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
30#if defined(V8_TARGET_ARCH_IA32)
31
Kristian Monsen80d68ea2010-09-08 11:05:35 +010032#include "bootstrapper.h"
Ben Murdoch257744e2011-11-30 15:57:28 +000033#include "code-stubs.h"
Steve Block44f0eee2011-05-26 01:26:41 +010034#include "isolate.h"
Ben Murdoch257744e2011-11-30 15:57:28 +000035#include "jsregexp.h"
Kristian Monsen80d68ea2010-09-08 11:05:35 +010036#include "regexp-macro-assembler.h"
37
38namespace v8 {
39namespace internal {
40
41#define __ ACCESS_MASM(masm)
Steve Block1e0659c2011-05-24 12:43:12 +010042
// ToNumberStub: convert the single argument (in eax) to a number.
// Smis and heap numbers are already numbers and are returned unchanged;
// everything else falls back to the TO_NUMBER builtin.
void ToNumberStub::Generate(MacroAssembler* masm) {
  // The ToNumber stub takes one argument in eax.
  Label check_heap_number, call_builtin;
  // A smi is already a number -> return it as-is.
  __ JumpIfNotSmi(eax, &check_heap_number, Label::kNear);
  __ ret(0);

  __ bind(&check_heap_number);
  // A heap number is also already a number: compare the object's map
  // against the canonical heap-number map.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  Factory* factory = masm->isolate()->factory();
  __ cmp(Operand(ebx), Immediate(factory->heap_number_map()));
  __ j(not_equal, &call_builtin, Label::kNear);
  __ ret(0);

  __ bind(&call_builtin);
  // Neither smi nor heap number: re-push the argument beneath the return
  // address and tail-call the TO_NUMBER builtin.
  __ pop(ecx);  // Pop return address.
  __ push(eax);
  __ push(ecx);  // Push return address.
  __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION);
}
62
63
// FastNewClosureStub: allocate a JSFunction for the SharedFunctionInfo
// passed on the stack, falling back to Runtime::kNewClosure when new-space
// allocation fails.
void FastNewClosureStub::Generate(MacroAssembler* masm) {
  // Create a new closure from the given function info in new
  // space. Set the context to the current context in esi.
  Label gc;
  __ AllocateInNewSpace(JSFunction::kSize, eax, ebx, ecx, &gc, TAG_OBJECT);

  // Get the function info from the stack.
  __ mov(edx, Operand(esp, 1 * kPointerSize));

  // Strict-mode functions get a different map (distinct prototype chain /
  // argument handling is encoded in the map index).
  int map_index = strict_mode_ == kStrictMode
      ? Context::STRICT_MODE_FUNCTION_MAP_INDEX
      : Context::FUNCTION_MAP_INDEX;

  // Compute the function map in the current global context and set that
  // as the map of the allocated object.
  __ mov(ecx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalContextOffset));
  __ mov(ecx, Operand(ecx, Context::SlotOffset(map_index)));
  __ mov(FieldOperand(eax, JSObject::kMapOffset), ecx);

  // Initialize the rest of the function. We don't have to update the
  // write barrier because the allocated object is in new space.
  Factory* factory = masm->isolate()->factory();
  __ mov(ebx, Immediate(factory->empty_fixed_array()));
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
  __ mov(FieldOperand(eax, JSFunction::kPrototypeOrInitialMapOffset),
         Immediate(factory->the_hole_value()));
  __ mov(FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset), edx);
  __ mov(FieldOperand(eax, JSFunction::kContextOffset), esi);
  __ mov(FieldOperand(eax, JSFunction::kLiteralsOffset), ebx);
  __ mov(FieldOperand(eax, JSFunction::kNextFunctionLinkOffset),
         Immediate(factory->undefined_value()));

  // Initialize the code pointer in the function to be the one
  // found in the shared function info object.
  __ mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
  __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
  __ mov(FieldOperand(eax, JSFunction::kCodeEntryOffset), edx);

  // Return and remove the on-stack parameter.
  __ ret(1 * kPointerSize);

  // Create a new closure through the slower runtime call.
  __ bind(&gc);
  __ pop(ecx);  // Temporarily remove return address.
  __ pop(edx);
  __ push(esi);
  __ push(edx);
  // NOTE(review): third runtime argument is false_value — presumably the
  // pretenure flag for Runtime::kNewClosure; verify against the runtime impl.
  __ push(Immediate(factory->false_value()));
  __ push(ecx);  // Restore return address.
  __ TailCallRuntime(Runtime::kNewClosure, 3, 1);
}
117
118
// FastNewContextStub: allocate a function context with slots_ extra slots
// for the function passed on the stack. On success the new context is left
// in both eax and esi; on allocation failure the runtime is called.
void FastNewContextStub::Generate(MacroAssembler* masm) {
  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
  __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize,
                        eax, ebx, ecx, &gc, TAG_OBJECT);

  // Get the function from the stack.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));

  // Setup the object header.
  Factory* factory = masm->isolate()->factory();
  __ mov(FieldOperand(eax, HeapObject::kMapOffset),
         factory->function_context_map());
  __ mov(FieldOperand(eax, Context::kLengthOffset),
         Immediate(Smi::FromInt(length)));

  // Setup the fixed slots: closure, previous context (esi) and extension.
  __ Set(ebx, Immediate(0));  // Set to NULL.
  __ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx);
  __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), esi);
  __ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx);

  // Copy the global object from the previous context.
  __ mov(ebx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ mov(Operand(eax, Context::SlotOffset(Context::GLOBAL_INDEX)), ebx);

  // Initialize the rest of the slots to undefined.
  __ mov(ebx, factory->undefined_value());
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
    __ mov(Operand(eax, Context::SlotOffset(i)), ebx);
  }

  // Return and remove the on-stack parameter; the new context also becomes
  // the current context (esi).
  __ mov(esi, Operand(eax));
  __ ret(1 * kPointerSize);

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1);
}
160
161
// FastCloneShallowArrayStub: clone the boilerplate array stored in the
// literals array at the given literal index. Falls back to the runtime when
// the boilerplate has not been created yet (undefined) or allocation fails.
void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [esp + kPointerSize]: constant elements.
  // [esp + (2 * kPointerSize)]: literal index.
  // [esp + (3 * kPointerSize)]: literals array.

  // All sizes here are multiples of kPointerSize.
  int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
  int size = JSArray::kSize + elements_size;

  // Load boilerplate object into ecx and check if we need to create a
  // boilerplate.
  Label slow_case;
  __ mov(ecx, Operand(esp, 3 * kPointerSize));
  __ mov(eax, Operand(esp, 2 * kPointerSize));
  // The literal index in eax is a smi; with a 1-bit smi tag on a 32-bit
  // target, scaling by half a pointer turns the smi directly into a
  // FixedArray element offset. The asserts pin those assumptions.
  STATIC_ASSERT(kPointerSize == 4);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);
  __ mov(ecx, FieldOperand(ecx, eax, times_half_pointer_size,
                           FixedArray::kHeaderSize));
  Factory* factory = masm->isolate()->factory();
  __ cmp(ecx, factory->undefined_value());
  __ j(equal, &slow_case);

  if (FLAG_debug_code) {
    // Debug builds: verify the boilerplate's elements have the expected
    // (writable vs copy-on-write) fixed-array map.
    const char* message;
    Handle<Map> expected_map;
    if (mode_ == CLONE_ELEMENTS) {
      message = "Expected (writable) fixed array";
      expected_map = factory->fixed_array_map();
    } else {
      ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS);
      message = "Expected copy-on-write fixed array";
      expected_map = factory->fixed_cow_array_map();
    }
    __ push(ecx);
    __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
    __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), expected_map);
    __ Assert(equal, message);
    __ pop(ecx);
  }

  // Allocate both the JS array and the elements array in one big
  // allocation. This avoids multiple limit checks.
  __ AllocateInNewSpace(size, eax, ebx, edx, &slow_case, TAG_OBJECT);

  // Copy the JS array part. The elements pointer is skipped when a fresh
  // elements array will be attached below.
  for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
    if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
      __ mov(ebx, FieldOperand(ecx, i));
      __ mov(FieldOperand(eax, i), ebx);
    }
  }

  if (length_ > 0) {
    // Get hold of the elements array of the boilerplate and setup the
    // elements pointer in the resulting object.
    __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
    __ lea(edx, Operand(eax, JSArray::kSize));
    __ mov(FieldOperand(eax, JSArray::kElementsOffset), edx);

    // Copy the elements array.
    for (int i = 0; i < elements_size; i += kPointerSize) {
      __ mov(ebx, FieldOperand(ecx, i));
      __ mov(FieldOperand(edx, i), ebx);
    }
  }

  // Return and remove the on-stack parameters.
  __ ret(3 * kPointerSize);

  __ bind(&slow_case);
  __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
}
237
238
// The stub expects its argument on the stack and returns its result in tos_:
// zero for false, and a non-zero value for true. Only the type cases recorded
// in types_ are emitted; an unseen type jumps to &patch which re-specializes
// the stub via GenerateTypeTransition.
void ToBooleanStub::Generate(MacroAssembler* masm) {
  Label patch;
  Factory* factory = masm->isolate()->factory();
  const Register argument = eax;
  const Register map = edx;

  if (!types_.IsEmpty()) {
    __ mov(argument, Operand(esp, 1 * kPointerSize));
  }

  // undefined -> false
  CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false);

  // Boolean -> its value
  CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false);
  CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true);

  // 'null' -> false.
  CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false);

  if (types_.Contains(SMI)) {
    // Smis: 0 -> false, all other -> true
    Label not_smi;
    __ JumpIfNotSmi(argument, &not_smi, Label::kNear);
    // argument contains the correct return value already.
    if (!tos_.is(argument)) {
      __ mov(tos_, argument);
    }
    __ ret(1 * kPointerSize);
    __ bind(&not_smi);
  } else if (types_.NeedsMap()) {
    // If we need a map later and have a Smi -> patch.
    __ JumpIfSmi(argument, &patch, Label::kNear);
  }

  if (types_.NeedsMap()) {
    // Load the map once; the type checks below all test against it.
    __ mov(map, FieldOperand(argument, HeapObject::kMapOffset));

    if (types_.CanBeUndetectable()) {
      __ test_b(FieldOperand(map, Map::kBitFieldOffset),
                1 << Map::kIsUndetectable);
      // Undetectable -> false.
      Label not_undetectable;
      __ j(zero, &not_undetectable, Label::kNear);
      __ Set(tos_, Immediate(0));
      __ ret(1 * kPointerSize);
      __ bind(&not_undetectable);
    }
  }

  if (types_.Contains(SPEC_OBJECT)) {
    // spec object -> true.
    Label not_js_object;
    __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
    __ j(below, &not_js_object, Label::kNear);
    // argument contains the correct return value already.
    if (!tos_.is(argument)) {
      __ Set(tos_, Immediate(1));
    }
    __ ret(1 * kPointerSize);
    __ bind(&not_js_object);
  }

  if (types_.Contains(STRING)) {
    // String value -> false iff empty (length is a smi, zero iff empty).
    Label not_string;
    __ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
    __ j(above_equal, &not_string, Label::kNear);
    __ mov(tos_, FieldOperand(argument, String::kLengthOffset));
    __ ret(1 * kPointerSize);  // the string length is OK as the return value
    __ bind(&not_string);
  }

  if (types_.Contains(HEAP_NUMBER)) {
    // heap number -> false iff +0, -0, or NaN.
    Label not_heap_number, false_result;
    __ cmp(map, factory->heap_number_map());
    __ j(not_equal, &not_heap_number, Label::kNear);
    // Compare the value against 0.0 on the x87 stack; FCmp sets the zero
    // flag for +0/-0 and for NaN comparisons.
    __ fldz();
    __ fld_d(FieldOperand(argument, HeapNumber::kValueOffset));
    __ FCmp();
    __ j(zero, &false_result, Label::kNear);
    // argument contains the correct return value already.
    if (!tos_.is(argument)) {
      __ Set(tos_, Immediate(1));
    }
    __ ret(1 * kPointerSize);
    __ bind(&false_result);
    __ Set(tos_, Immediate(0));
    __ ret(1 * kPointerSize);
    __ bind(&not_heap_number);
  }

  // Unhandled type: patch the stub to a more general specialization.
  __ bind(&patch);
  GenerateTypeTransition(masm);
}
337
338
// Emit a check for one oddball value (undefined/true/false/null): if the
// argument equals the root at 'value', return 'result' (0 or non-zero) in
// tos_. Emits nothing when 'type' is not in types_.
void ToBooleanStub::CheckOddball(MacroAssembler* masm,
                                 Type type,
                                 Heap::RootListIndex value,
                                 bool result) {
  const Register argument = eax;
  if (types_.Contains(type)) {
    // If we see an expected oddball, return its ToBoolean value tos_.
    Label different_value;
    __ CompareRoot(argument, value);
    __ j(not_equal, &different_value, Label::kNear);
    if (!result) {
      // If we have to return zero, there is no way around clearing tos_.
      __ Set(tos_, Immediate(0));
    } else if (!tos_.is(argument)) {
      // If we have to return non-zero, we can re-use the argument if it is the
      // same register as the result, because we never see Smi-zero here.
      __ Set(tos_, Immediate(1));
    }
    __ ret(1 * kPointerSize);
    __ bind(&different_value);
  }
}
361
362
// Tail-call the IC miss handler with the operand plus the stub's state
// (result register code and seen-types byte) so the caller can be patched
// with a more general ToBoolean specialization.
void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ pop(ecx);  // Get return address, operand is now on top of stack.
  __ push(Immediate(Smi::FromInt(tos_.code())));
  __ push(Immediate(Smi::FromInt(types_.ToByte())));
  __ push(ecx);  // Push return address.
  // Patch the caller to an appropriate specialized stub and return the
  // operation result to the caller of the stub.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()),
      3,
      1);
}
375
376
// Static-only collection of code-generation helpers for loading, checking
// and converting floating-point operands (x87 FPU and SSE2 variants) used
// by the arithmetic stubs in this file.
class FloatingPointHelper : public AllStatic {
 public:
  // Where the two operands of a binary operation live on entry.
  enum ArgLocation {
    ARGS_ON_STACK,
    ARGS_IN_REGISTERS
  };

  // Code pattern for loading a floating point value. Input value must
  // be either a smi or a heap number object (fp value). Requirements:
  // operand in register number. Returns operand as floating point number
  // on FPU stack.
  static void LoadFloatOperand(MacroAssembler* masm, Register number);

  // Code pattern for loading floating point values. Input values must
  // be either smi or heap number objects (fp values). Requirements:
  // operand_1 on TOS+1 or in edx, operand_2 on TOS+2 or in eax.
  // Returns operands as floating point numbers on FPU stack.
  static void LoadFloatOperands(MacroAssembler* masm,
                                Register scratch,
                                ArgLocation arg_location = ARGS_ON_STACK);

  // Similar to LoadFloatOperand but assumes that both operands are smis.
  // Expects operands in edx, eax.
  static void LoadFloatSmis(MacroAssembler* masm, Register scratch);

  // Test if operands are smi or number objects (fp). Requirements:
  // operand_1 in eax, operand_2 in edx; falls through on float
  // operands, jumps to the non_float label otherwise.
  static void CheckFloatOperands(MacroAssembler* masm,
                                 Label* non_float,
                                 Register scratch);

  // Checks that the two floating point numbers on top of the FPU stack
  // have int32 values.
  static void CheckFloatOperandsAreInt32(MacroAssembler* masm,
                                         Label* non_int32);

  // Takes the operands in edx and eax and loads them as integers in eax
  // and ecx.
  static void LoadUnknownsAsIntegers(MacroAssembler* masm,
                                     bool use_sse3,
                                     Label* operand_conversion_failure);

  // Must only be called after LoadUnknownsAsIntegers. Assumes that the
  // operands are pushed on the stack, and that their conversions to int32
  // are in eax and ecx. Checks that the original numbers were in the int32
  // range.
  static void CheckLoadedIntegersWereInt32(MacroAssembler* masm,
                                           bool use_sse3,
                                           Label* not_int32);

  // Assumes that operands are smis or heap numbers and loads them
  // into xmm0 and xmm1. Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm);

  // Test if operands are numbers (smi or HeapNumber objects), and load
  // them into xmm0 and xmm1 if they are. Jump to label not_numbers if
  // either operand is not a number. Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm, Label* not_numbers);

  // Similar to LoadSSE2Operands but assumes that both operands are smis.
  // Expects operands in edx, eax.
  static void LoadSSE2Smis(MacroAssembler* masm, Register scratch);

  // Checks that the two floating point numbers loaded into xmm0 and xmm1
  // have int32 values.
  static void CheckSSE2OperandsAreInt32(MacroAssembler* masm,
                                        Label* non_int32,
                                        Register scratch);
};
449
450
// Get the integer part of a heap number. Surprisingly, all this bit twiddling
// is faster than using the built-in instructions on floating point registers.
// Trashes edi and ebx. Dest is ecx. Source cannot be ecx or one of the
// trashed registers.
// With SSE3 available, FISTTP does a truncating 64-bit conversion directly;
// otherwise the mantissa is assembled and shifted by hand from the two
// 32-bit halves of the IEEE-754 double.
static void IntegerConvert(MacroAssembler* masm,
                           Register source,
                           bool use_sse3,
                           Label* conversion_failure) {
  ASSERT(!source.is(ecx) && !source.is(edi) && !source.is(ebx));
  Label done, right_exponent, normal_exponent;
  Register scratch = ebx;
  Register scratch2 = edi;
  // Get exponent word.
  __ mov(scratch, FieldOperand(source, HeapNumber::kExponentOffset));
  // Get exponent alone in scratch2.
  __ mov(scratch2, scratch);
  __ and_(scratch2, HeapNumber::kExponentMask);
  if (use_sse3) {
    CpuFeatures::Scope scope(SSE3);
    // Check whether the exponent is too big for a 64 bit signed integer.
    static const uint32_t kTooBigExponent =
        (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
    __ cmp(Operand(scratch2), Immediate(kTooBigExponent));
    __ j(greater_equal, conversion_failure);
    // Load x87 register with heap number.
    __ fld_d(FieldOperand(source, HeapNumber::kValueOffset));
    // Reserve space for 64 bit answer.
    __ sub(Operand(esp), Immediate(sizeof(uint64_t)));  // Nolint.
    // Do conversion, which cannot fail because we checked the exponent.
    __ fisttp_d(Operand(esp, 0));
    __ mov(ecx, Operand(esp, 0));  // Load low word of answer into ecx.
    __ add(Operand(esp), Immediate(sizeof(uint64_t)));  // Nolint.
  } else {
    // Load ecx with zero. We use this either for the final shift or
    // for the answer.
    __ xor_(ecx, Operand(ecx));
    // Check whether the exponent matches a 32 bit signed int that cannot be
    // represented by a Smi. A non-smi 32 bit integer is 1.xxx * 2^30 so the
    // exponent is 30 (biased). This is the exponent that we are fastest at and
    // also the highest exponent we can handle here.
    const uint32_t non_smi_exponent =
        (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift;
    __ cmp(Operand(scratch2), Immediate(non_smi_exponent));
    // If we have a match of the int32-but-not-Smi exponent then skip some
    // logic.
    __ j(equal, &right_exponent, Label::kNear);
    // If the exponent is higher than that then go to slow case. This catches
    // numbers that don't fit in a signed int32, infinities and NaNs.
    __ j(less, &normal_exponent, Label::kNear);

    {
      // Handle a big exponent. The only reason we have this code is that the
      // >>> operator has a tendency to generate numbers with an exponent of 31.
      const uint32_t big_non_smi_exponent =
          (HeapNumber::kExponentBias + 31) << HeapNumber::kExponentShift;
      __ cmp(Operand(scratch2), Immediate(big_non_smi_exponent));
      __ j(not_equal, conversion_failure);
      // We have the big exponent, typically from >>>. This means the number is
      // in the range 2^31 to 2^32 - 1. Get the top bits of the mantissa.
      __ mov(scratch2, scratch);
      __ and_(scratch2, HeapNumber::kMantissaMask);
      // Put back the implicit 1.
      __ or_(scratch2, 1 << HeapNumber::kExponentShift);
      // Shift up the mantissa bits to take up the space the exponent used to
      // take. We just orred in the implicit bit so that took care of one and
      // we want to use the full unsigned range so we subtract 1 bit from the
      // shift distance.
      const int big_shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 1;
      __ shl(scratch2, big_shift_distance);
      // Get the second half of the double.
      __ mov(ecx, FieldOperand(source, HeapNumber::kMantissaOffset));
      // Shift down 21 bits to get the most significant 11 bits or the low
      // mantissa word.
      __ shr(ecx, 32 - big_shift_distance);
      __ or_(ecx, Operand(scratch2));
      // We have the answer in ecx, but we may need to negate it.
      // The original exponent word's sign bit tells us the sign.
      __ test(scratch, Operand(scratch));
      __ j(positive, &done, Label::kNear);
      __ neg(ecx);
      __ jmp(&done, Label::kNear);
    }

    __ bind(&normal_exponent);
    // Exponent word in scratch, exponent part of exponent word in scratch2.
    // Zero in ecx.
    // We know the exponent is smaller than 30 (biased). If it is less than
    // 0 (biased) then the number is smaller in magnitude than 1.0 * 2^0, ie
    // it rounds to zero.
    const uint32_t zero_exponent =
        (HeapNumber::kExponentBias + 0) << HeapNumber::kExponentShift;
    __ sub(Operand(scratch2), Immediate(zero_exponent));
    // ecx already has a Smi zero.
    __ j(less, &done, Label::kNear);

    // We have a shifted exponent between 0 and 30 in scratch2.
    __ shr(scratch2, HeapNumber::kExponentShift);
    __ mov(ecx, Immediate(30));
    __ sub(ecx, Operand(scratch2));

    __ bind(&right_exponent);
    // Here ecx is the shift, scratch is the exponent word.
    // Get the top bits of the mantissa.
    __ and_(scratch, HeapNumber::kMantissaMask);
    // Put back the implicit 1.
    __ or_(scratch, 1 << HeapNumber::kExponentShift);
    // Shift up the mantissa bits to take up the space the exponent used to
    // take. We have kExponentShift + 1 significant bits int he low end of the
    // word. Shift them to the top bits.
    const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2;
    __ shl(scratch, shift_distance);
    // Get the second half of the double. For some exponents we don't
    // actually need this because the bits get shifted out again, but
    // it's probably slower to test than just to do it.
    __ mov(scratch2, FieldOperand(source, HeapNumber::kMantissaOffset));
    // Shift down 22 bits to get the most significant 10 bits or the low
    // mantissa word.
    __ shr(scratch2, 32 - shift_distance);
    __ or_(scratch2, Operand(scratch));
    // Move down according to the exponent.
    __ shr_cl(scratch2);
    // Now the unsigned answer is in scratch2. We need to move it to ecx and
    // we may need to fix the sign.
    Label negative;
    __ xor_(ecx, Operand(ecx));
    // Compare zero against the exponent word: greater means the sign bit
    // of the double was set, so negate the result.
    __ cmp(ecx, FieldOperand(source, HeapNumber::kExponentOffset));
    __ j(greater, &negative, Label::kNear);
    __ mov(ecx, scratch2);
    __ jmp(&done, Label::kNear);
    __ bind(&negative);
    __ sub(ecx, Operand(scratch2));
    __ bind(&done);
  }
}
584
585
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000586void UnaryOpStub::PrintName(StringStream* stream) {
Ben Murdoch257744e2011-11-30 15:57:28 +0000587 const char* op_name = Token::Name(op_);
588 const char* overwrite_name = NULL; // Make g++ happy.
589 switch (mode_) {
590 case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
591 case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
592 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000593 stream->Add("UnaryOpStub_%s_%s_%s",
594 op_name,
595 overwrite_name,
596 UnaryOpIC::GetName(operand_type_));
Ben Murdoch257744e2011-11-30 15:57:28 +0000597}
598
599
600// TODO(svenpanne): Use virtual functions instead of switch.
601void UnaryOpStub::Generate(MacroAssembler* masm) {
602 switch (operand_type_) {
603 case UnaryOpIC::UNINITIALIZED:
604 GenerateTypeTransition(masm);
605 break;
606 case UnaryOpIC::SMI:
607 GenerateSmiStub(masm);
608 break;
609 case UnaryOpIC::HEAP_NUMBER:
610 GenerateHeapNumberStub(masm);
611 break;
612 case UnaryOpIC::GENERIC:
613 GenerateGenericStub(masm);
614 break;
615 }
616}
617
618
// Tail-call the unary-op IC miss handler with the operand and the stub's
// full state (token, overwrite mode, operand type) so the call site can be
// patched with a more specialized stub.
void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ pop(ecx);  // Save return address.

  __ push(eax);  // the operand
  __ push(Immediate(Smi::FromInt(op_)));
  __ push(Immediate(Smi::FromInt(mode_)));
  __ push(Immediate(Smi::FromInt(operand_type_)));

  __ push(ecx);  // Push return address.

  // Patch the caller to an appropriate specialized stub and return the
  // operation result to the caller of the stub.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kUnaryOp_Patch), masm->isolate()), 4, 1);
}
634
635
636// TODO(svenpanne): Use virtual functions instead of switch.
637void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
638 switch (op_) {
639 case Token::SUB:
640 GenerateSmiStubSub(masm);
641 break;
642 case Token::BIT_NOT:
643 GenerateSmiStubBitNot(masm);
644 break;
645 default:
646 UNREACHABLE();
647 }
648}
649
650
// Smi-specialized unary minus: fast path returns directly; overflow is
// undone and all failure paths fall through to a type transition.
void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) {
  Label non_smi, undo, slow;
  GenerateSmiCodeSub(masm, &non_smi, &undo, &slow,
                     Label::kNear, Label::kNear, Label::kNear);
  __ bind(&undo);
  GenerateSmiCodeUndo(masm);
  // Note: &non_smi and &slow deliberately fall through to the transition.
  __ bind(&non_smi);
  __ bind(&slow);
  GenerateTypeTransition(masm);
}
661
662
// Smi-specialized bitwise NOT: non-smi operands trigger a type transition.
void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) {
  Label non_smi;
  GenerateSmiCodeBitNot(masm, &non_smi);
  __ bind(&non_smi);
  GenerateTypeTransition(masm);
}
669
670
// Emit the smi fast path for unary minus on the operand in eax.
// Jumps to non_smi for non-smi input, slow for 0 (since -0 is not a smi),
// and undo on overflow (negating the most negative smi).
void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
                                     Label* non_smi,
                                     Label* undo,
                                     Label* slow,
                                     Label::Distance non_smi_near,
                                     Label::Distance undo_near,
                                     Label::Distance slow_near) {
  // Check whether the value is a smi.
  __ JumpIfNotSmi(eax, non_smi, non_smi_near);

  // We can't handle -0 with smis, so use a type transition for that case.
  __ test(eax, Operand(eax));
  __ j(zero, slow, slow_near);

  // Try optimistic subtraction '0 - value', saving operand in edx for undo.
  __ mov(edx, Operand(eax));
  __ Set(eax, Immediate(0));
  __ sub(eax, Operand(edx));
  __ j(overflow, undo, undo_near);
  __ ret(0);
}
692
693
// Emit the smi fast path for bitwise NOT on the operand in eax.
// Jumps to non_smi for non-smi input; otherwise returns ~value as a smi.
void UnaryOpStub::GenerateSmiCodeBitNot(
    MacroAssembler* masm,
    Label* non_smi,
    Label::Distance non_smi_near) {
  // Check whether the value is a smi.
  __ JumpIfNotSmi(eax, non_smi, non_smi_near);

  // Flip bits and revert inverted smi-tag: NOT also inverts the tag bits,
  // so mask them back to a valid smi tag.
  __ not_(eax);
  __ and_(eax, ~kSmiTagMask);
  __ ret(0);
}
706
707
// Restore the original operand into eax (GenerateSmiCodeSub stashed it in
// edx before the optimistic negation).
void UnaryOpStub::GenerateSmiCodeUndo(MacroAssembler* masm) {
  __ mov(eax, Operand(edx));
}
711
712
713// TODO(svenpanne): Use virtual functions instead of switch.
714void UnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
715 switch (op_) {
716 case Token::SUB:
717 GenerateHeapNumberStubSub(masm);
718 break;
719 case Token::BIT_NOT:
720 GenerateHeapNumberStubBitNot(masm);
721 break;
722 default:
723 UNREACHABLE();
724 }
725}
726
727
// Unary minus for operands recorded as heap numbers.
//
// Tries the smi fast path first, then the heap-number path. A smi that
// cannot be negated as a smi (the 0 / -0 case) goes straight to the
// generic builtin fallback; an operand of unexpected type triggers a
// type transition.
void UnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) {
  Label non_smi, undo, slow, call_builtin;
  GenerateSmiCodeSub(masm, &non_smi, &undo, &call_builtin, Label::kNear);
  __ bind(&non_smi);
  GenerateHeapNumberCodeSub(masm, &slow);
  __ bind(&undo);
  GenerateSmiCodeUndo(masm);
  __ bind(&slow);
  GenerateTypeTransition(masm);
  __ bind(&call_builtin);
  GenerateGenericCodeFallback(masm);
}
740
741
// Bitwise NOT for operands recorded as heap numbers: smi fast path first,
// then heap-number conversion; anything else transitions the stub.
void UnaryOpStub::GenerateHeapNumberStubBitNot(
    MacroAssembler* masm) {
  Label non_smi, slow;
  GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
  __ bind(&non_smi);
  GenerateHeapNumberCodeBitNot(masm, &slow);
  __ bind(&slow);
  GenerateTypeTransition(masm);
}
751
752
// Negates the heap number in eax; jumps to slow if eax is not a heap number.
//
// In UNARY_OVERWRITE mode the sign bit of the input number is flipped in
// place. Otherwise a fresh heap number is allocated (going through the
// runtime if inline allocation fails) and filled with the sign-flipped
// exponent/mantissa words of the operand. Returns with the result heap
// number in eax.
void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
                                            Label* slow) {
  __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(edx, masm->isolate()->factory()->heap_number_map());
  __ j(not_equal, slow);

  if (mode_ == UNARY_OVERWRITE) {
    __ xor_(FieldOperand(eax, HeapNumber::kExponentOffset),
            Immediate(HeapNumber::kSignMask));  // Flip sign.
  } else {
    __ mov(edx, Operand(eax));
    // edx: operand

    Label slow_allocate_heapnumber, heapnumber_allocated;
    __ AllocateHeapNumber(eax, ebx, ecx, &slow_allocate_heapnumber);
    __ jmp(&heapnumber_allocated, Label::kNear);

    __ bind(&slow_allocate_heapnumber);
    // Inline allocation failed: allocate through the runtime, preserving
    // the operand (edx) across the call.
    __ EnterInternalFrame();
    __ push(edx);
    __ CallRuntime(Runtime::kNumberAlloc, 0);
    __ pop(edx);
    __ LeaveInternalFrame();

    __ bind(&heapnumber_allocated);
    // eax: allocated 'empty' number
    __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
    __ xor_(ecx, HeapNumber::kSignMask);  // Flip sign.
    __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ecx);
    __ mov(ecx, FieldOperand(edx, HeapNumber::kMantissaOffset));
    __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx);
  }
  __ ret(0);
}
787
788
// Bitwise NOT of the heap number in eax; jumps to slow if eax is not a
// heap number or the double-to-integer conversion needs the slow path.
//
// The number is truncated to an untagged int32 in ecx and inverted. If the
// result fits in a smi it is tagged and returned. Otherwise it is stored
// into a heap number: the input object is reused unless the mode is
// UNARY_NO_OVERWRITE, in which case a new heap number is allocated. If
// that allocation has to go through the runtime (and thus may GC), the
// untagged value is recomputed afterwards rather than preserved.
void UnaryOpStub::GenerateHeapNumberCodeBitNot(MacroAssembler* masm,
                                               Label* slow) {
  __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(edx, masm->isolate()->factory()->heap_number_map());
  __ j(not_equal, slow);

  // Convert the heap number in eax to an untagged integer in ecx.
  IntegerConvert(masm, eax, CpuFeatures::IsSupported(SSE3), slow);

  // Do the bitwise operation and check if the result fits in a smi.
  Label try_float;
  __ not_(ecx);
  __ cmp(ecx, 0xc0000000);
  __ j(sign, &try_float, Label::kNear);

  // Tag the result as a smi and we're done.
  STATIC_ASSERT(kSmiTagSize == 1);
  __ lea(eax, Operand(ecx, times_2, kSmiTag));
  __ ret(0);

  // Try to store the result in a heap number.
  __ bind(&try_float);
  if (mode_ == UNARY_NO_OVERWRITE) {
    Label slow_allocate_heapnumber, heapnumber_allocated;
    __ mov(ebx, eax);
    __ AllocateHeapNumber(eax, edx, edi, &slow_allocate_heapnumber);
    __ jmp(&heapnumber_allocated);

    __ bind(&slow_allocate_heapnumber);
    __ EnterInternalFrame();
    // Push the original HeapNumber on the stack. The integer value can't
    // be stored since it's untagged and not in the smi range (so we can't
    // smi-tag it). We'll recalculate the value after the GC instead.
    __ push(ebx);
    __ CallRuntime(Runtime::kNumberAlloc, 0);
    // New HeapNumber is in eax.
    __ pop(edx);
    __ LeaveInternalFrame();
    // IntegerConvert uses ebx and edi as scratch registers.
    // This conversion won't go slow-case.
    IntegerConvert(masm, edx, CpuFeatures::IsSupported(SSE3), slow);
    __ not_(ecx);

    __ bind(&heapnumber_allocated);
  }
  // Store the untagged int32 result (ecx) into the heap number in eax
  // as a double, with or without SSE2.
  if (CpuFeatures::IsSupported(SSE2)) {
    CpuFeatures::Scope use_sse2(SSE2);
    __ cvtsi2sd(xmm0, Operand(ecx));
    __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
  } else {
    __ push(ecx);
    __ fild_s(Operand(esp, 0));
    __ pop(ecx);
    __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
  }
  __ ret(0);
}
846
847
848// TODO(svenpanne): Use virtual functions instead of switch.
849void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) {
850 switch (op_) {
851 case Token::SUB:
852 GenerateGenericStubSub(masm);
853 break;
854 case Token::BIT_NOT:
855 GenerateGenericStubBitNot(masm);
856 break;
857 default:
858 UNREACHABLE();
859 }
860}
861
862
// Generic unary minus: smi fast path, then heap-number path, and finally
// the JavaScript builtin fallback for everything else (no further type
// transitions from here).
void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) {
  Label non_smi, undo, slow;
  GenerateSmiCodeSub(masm, &non_smi, &undo, &slow, Label::kNear);
  __ bind(&non_smi);
  GenerateHeapNumberCodeSub(masm, &slow);
  __ bind(&undo);
  GenerateSmiCodeUndo(masm);
  __ bind(&slow);
  GenerateGenericCodeFallback(masm);
}
873
874
// Generic bitwise NOT: smi fast path, then heap-number path, and finally
// the JavaScript builtin fallback for everything else.
void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) {
  Label non_smi, slow;
  GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
  __ bind(&non_smi);
  GenerateHeapNumberCodeBitNot(masm, &slow);
  __ bind(&slow);
  GenerateGenericCodeFallback(masm);
}
883
884
// Tail-calls the JavaScript builtin matching this stub's operation.
// The operand (eax) is pushed as the builtin's argument underneath the
// re-pushed return address.
void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) {
  // Handle the slow case by jumping to the corresponding JavaScript builtin.
  __ pop(ecx);  // pop return address.
  __ push(eax);
  __ push(ecx);  // push return address
  switch (op_) {
    case Token::SUB:
      __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
      break;
    case Token::BIT_NOT:
      __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}
901
902
// Calls the BinaryOp_Patch IC utility to repatch the caller with a more
// general stub specialization. The two operands (edx, eax) plus this
// stub's minor key, operation and recorded operand types (five arguments
// in total) are pushed under the return address; the runtime patches the
// caller and returns the operation's result.
void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ pop(ecx);  // Save return address.
  __ push(edx);
  __ push(eax);
  // Left and right arguments are now on top.
  // Push this stub's key. Although the operation and the type info are
  // encoded into the key, the encoding is opaque, so push them too.
  __ push(Immediate(Smi::FromInt(MinorKey())));
  __ push(Immediate(Smi::FromInt(op_)));
  __ push(Immediate(Smi::FromInt(operands_type_)));

  __ push(ecx);  // Push return address.

  // Patch the caller to an appropriate specialized stub and return the
  // operation result to the caller of the stub.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kBinaryOp_Patch),
                        masm->isolate()),
      5,
      1);
}
924
925
// Prepare for a type transition runtime call when the args are already on
// the stack, under the return address. Otherwise identical to
// GenerateTypeTransition: pushes the stub key, operation and operand-type
// info and tail-calls the BinaryOp_Patch IC utility (five arguments, one
// result).
void BinaryOpStub::GenerateTypeTransitionWithSavedArgs(MacroAssembler* masm) {
  __ pop(ecx);  // Save return address.
  // Left and right arguments are already on top of the stack.
  // Push this stub's key. Although the operation and the type info are
  // encoded into the key, the encoding is opaque, so push them too.
  __ push(Immediate(Smi::FromInt(MinorKey())));
  __ push(Immediate(Smi::FromInt(op_)));
  __ push(Immediate(Smi::FromInt(operands_type_)));

  __ push(ecx);  // Push return address.

  // Patch the caller to an appropriate specialized stub and return the
  // operation result to the caller of the stub.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kBinaryOp_Patch),
                        masm->isolate()),
      5,
      1);
}
947
948
Ben Murdoch257744e2011-11-30 15:57:28 +0000949void BinaryOpStub::Generate(MacroAssembler* masm) {
Ben Murdochb0fe1622011-05-05 13:52:32 +0100950 switch (operands_type_) {
Ben Murdoch257744e2011-11-30 15:57:28 +0000951 case BinaryOpIC::UNINITIALIZED:
Ben Murdochb0fe1622011-05-05 13:52:32 +0100952 GenerateTypeTransition(masm);
953 break;
Ben Murdoch257744e2011-11-30 15:57:28 +0000954 case BinaryOpIC::SMI:
Ben Murdochb0fe1622011-05-05 13:52:32 +0100955 GenerateSmiStub(masm);
956 break;
Ben Murdoch257744e2011-11-30 15:57:28 +0000957 case BinaryOpIC::INT32:
Ben Murdochb0fe1622011-05-05 13:52:32 +0100958 GenerateInt32Stub(masm);
959 break;
Ben Murdoch257744e2011-11-30 15:57:28 +0000960 case BinaryOpIC::HEAP_NUMBER:
Ben Murdochb0fe1622011-05-05 13:52:32 +0100961 GenerateHeapNumberStub(masm);
962 break;
Ben Murdoch257744e2011-11-30 15:57:28 +0000963 case BinaryOpIC::ODDBALL:
Steve Block44f0eee2011-05-26 01:26:41 +0100964 GenerateOddballStub(masm);
965 break;
Ben Murdoch257744e2011-11-30 15:57:28 +0000966 case BinaryOpIC::BOTH_STRING:
967 GenerateBothStringStub(masm);
968 break;
969 case BinaryOpIC::STRING:
Ben Murdochb0fe1622011-05-05 13:52:32 +0100970 GenerateStringStub(masm);
971 break;
Ben Murdoch257744e2011-11-30 15:57:28 +0000972 case BinaryOpIC::GENERIC:
Ben Murdochb0fe1622011-05-05 13:52:32 +0100973 GenerateGeneric(masm);
974 break;
975 default:
976 UNREACHABLE();
977 }
978}
979
980
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000981void BinaryOpStub::PrintName(StringStream* stream) {
Ben Murdochb0fe1622011-05-05 13:52:32 +0100982 const char* op_name = Token::Name(op_);
983 const char* overwrite_name;
984 switch (mode_) {
985 case NO_OVERWRITE: overwrite_name = "Alloc"; break;
986 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
987 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
988 default: overwrite_name = "UnknownOverwrite"; break;
989 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000990 stream->Add("BinaryOpStub_%s_%s_%s",
991 op_name,
992 overwrite_name,
993 BinaryOpIC::GetName(operands_type_));
Ben Murdochb0fe1622011-05-05 13:52:32 +0100994}
995
996
// Fast path for a binary operation on two smi operands.
//
// On entry the left operand is in edx and the right in eax; for the
// stack-argument ops (MOD and the bitwise/shift ops, which ret with
// 2 * kPointerSize) the caller has additionally pushed both operands on
// the stack (see GenerateSmiStub). If both operands are smis the result
// is computed and returned in eax. When a smi result overflows, it is
// either boxed into a heap number inline (ALLOW_HEAPNUMBER_RESULTS) or
// the original operands are restored and control falls through to the
// non-smi code (NO_HEAPNUMBER_RESULTS). Hard cases jump to slow; non-smi
// operands fall out at the end with the operands back in edx and eax.
void BinaryOpStub::GenerateSmiCode(
    MacroAssembler* masm,
    Label* slow,
    SmiCodeGenerateHeapNumberResults allow_heapnumber_results) {
  // 1. Move arguments into edx, eax except for DIV and MOD, which need the
  // dividend in eax and edx free for the division. Use eax, ebx for those.
  Comment load_comment(masm, "-- Load arguments");
  Register left = edx;
  Register right = eax;
  if (op_ == Token::DIV || op_ == Token::MOD) {
    left = eax;
    right = ebx;
    __ mov(ebx, eax);
    __ mov(eax, edx);
  }


  // 2. Prepare the smi check of both operands by oring them together.
  Comment smi_check_comment(masm, "-- Smi check arguments");
  Label not_smis;
  Register combined = ecx;
  ASSERT(!left.is(combined) && !right.is(combined));
  switch (op_) {
    case Token::BIT_OR:
      // Perform the operation into eax and smi check the result. Preserve
      // eax in case the result is not a smi.
      ASSERT(!left.is(ecx) && !right.is(ecx));
      __ mov(ecx, right);
      __ or_(right, Operand(left));  // Bitwise or is commutative.
      combined = right;
      break;

    case Token::BIT_XOR:
    case Token::BIT_AND:
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV:
    case Token::MOD:
      __ mov(combined, right);
      __ or_(combined, Operand(left));
      break;

    case Token::SHL:
    case Token::SAR:
    case Token::SHR:
      // Move the right operand into ecx for the shift operation, use eax
      // for the smi check register.
      ASSERT(!left.is(ecx) && !right.is(ecx));
      __ mov(ecx, right);
      __ or_(right, Operand(left));
      combined = right;
      break;

    default:
      break;
  }

  // 3. Perform the smi check of the operands.
  STATIC_ASSERT(kSmiTag == 0);  // Adjust zero check if not the case.
  __ JumpIfNotSmi(combined, &not_smis);

  // 4. Operands are both smis, perform the operation leaving the result in
  // eax and check the result if necessary.
  Comment perform_smi(masm, "-- Perform smi operation");
  Label use_fp_on_smis;
  switch (op_) {
    case Token::BIT_OR:
      // Nothing to do.
      break;

    case Token::BIT_XOR:
      ASSERT(right.is(eax));
      __ xor_(right, Operand(left));  // Bitwise xor is commutative.
      break;

    case Token::BIT_AND:
      ASSERT(right.is(eax));
      __ and_(right, Operand(left));  // Bitwise and is commutative.
      break;

    case Token::SHL:
      // Remove tags from operands (but keep sign).
      __ SmiUntag(left);
      __ SmiUntag(ecx);
      // Perform the operation.
      __ shl_cl(left);
      // Check that the *signed* result fits in a smi.
      __ cmp(left, 0xc0000000);
      __ j(sign, &use_fp_on_smis);
      // Tag the result and store it in register eax.
      __ SmiTag(left);
      __ mov(eax, left);
      break;

    case Token::SAR:
      // Remove tags from operands (but keep sign).
      // No overflow check needed: arithmetic right shift of a smi always
      // produces a smi.
      __ SmiUntag(left);
      __ SmiUntag(ecx);
      // Perform the operation.
      __ sar_cl(left);
      // Tag the result and store it in register eax.
      __ SmiTag(left);
      __ mov(eax, left);
      break;

    case Token::SHR:
      // Remove tags from operands (but keep sign).
      __ SmiUntag(left);
      __ SmiUntag(ecx);
      // Perform the operation.
      __ shr_cl(left);
      // Check that the *unsigned* result fits in a smi.
      // Neither of the two high-order bits can be set:
      // - 0x80000000: high bit would be lost when smi tagging.
      // - 0x40000000: this number would convert to negative when
      // Smi tagging these two cases can only happen with shifts
      // by 0 or 1 when handed a valid smi.
      __ test(left, Immediate(0xc0000000));
      __ j(not_zero, &use_fp_on_smis);
      // Tag the result and store it in register eax.
      __ SmiTag(left);
      __ mov(eax, left);
      break;

    case Token::ADD:
      ASSERT(right.is(eax));
      __ add(right, Operand(left));  // Addition is commutative.
      __ j(overflow, &use_fp_on_smis);
      break;

    case Token::SUB:
      __ sub(left, Operand(right));
      __ j(overflow, &use_fp_on_smis);
      __ mov(eax, left);
      break;

    case Token::MUL:
      // If the smi tag is 0 we can just leave the tag on one operand.
      STATIC_ASSERT(kSmiTag == 0);  // Adjust code below if not the case.
      // We can't revert the multiplication if the result is not a smi
      // so save the right operand.
      __ mov(ebx, right);
      // Remove tag from one of the operands (but keep sign).
      __ SmiUntag(right);
      // Do multiplication.
      __ imul(right, Operand(left));  // Multiplication is commutative.
      __ j(overflow, &use_fp_on_smis);
      // Check for negative zero result. Use combined = left | right.
      __ NegativeZeroTest(right, combined, &use_fp_on_smis);
      break;

    case Token::DIV:
      // We can't revert the division if the result is not a smi so
      // save the left operand.
      __ mov(edi, left);
      // Check for 0 divisor.
      __ test(right, Operand(right));
      __ j(zero, &use_fp_on_smis);
      // Sign extend left into edx:eax.
      ASSERT(left.is(eax));
      __ cdq();
      // Divide edx:eax by right.
      __ idiv(right);
      // Check for the corner case of dividing the most negative smi by
      // -1. We cannot use the overflow flag, since it is not set by idiv
      // instruction.
      STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
      __ cmp(eax, 0x40000000);
      __ j(equal, &use_fp_on_smis);
      // Check for negative zero result. Use combined = left | right.
      __ NegativeZeroTest(eax, combined, &use_fp_on_smis);
      // Check that the remainder is zero.
      __ test(edx, Operand(edx));
      __ j(not_zero, &use_fp_on_smis);
      // Tag the result and store it in register eax.
      __ SmiTag(eax);
      break;

    case Token::MOD:
      // Check for 0 divisor.
      __ test(right, Operand(right));
      __ j(zero, &not_smis);

      // Sign extend left into edx:eax.
      ASSERT(left.is(eax));
      __ cdq();
      // Divide edx:eax by right.
      __ idiv(right);
      // Check for negative zero result. Use combined = left | right.
      __ NegativeZeroTest(edx, combined, slow);
      // Move remainder to register eax.
      __ mov(eax, edx);
      break;

    default:
      UNREACHABLE();
  }

  // 5. Emit return of result in eax. Some operations have registers pushed.
  switch (op_) {
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV:
      __ ret(0);
      break;
    case Token::MOD:
    case Token::BIT_OR:
    case Token::BIT_AND:
    case Token::BIT_XOR:
    case Token::SAR:
    case Token::SHL:
    case Token::SHR:
      __ ret(2 * kPointerSize);
      break;
    default:
      UNREACHABLE();
  }

  // 6. For some operations emit inline code to perform floating point
  // operations on known smis (e.g., if the result of the operation
  // overflowed the smi range).
  if (allow_heapnumber_results == NO_HEAPNUMBER_RESULTS) {
    // Heap-number results are not allowed: restore the original operands
    // and treat the inputs as not-smis so the caller's slow path runs.
    __ bind(&use_fp_on_smis);
    switch (op_) {
      // Undo the effects of some operations, and some register moves.
      case Token::SHL:
        // The arguments are saved on the stack, and only used from there.
        break;
      case Token::ADD:
        // Revert right = right + left.
        __ sub(right, Operand(left));
        break;
      case Token::SUB:
        // Revert left = left - right.
        __ add(left, Operand(right));
        break;
      case Token::MUL:
        // Right was clobbered but a copy is in ebx.
        __ mov(right, ebx);
        break;
      case Token::DIV:
        // Left was clobbered but a copy is in edi. Right is in ebx for
        // division. They should be in eax, ebx for jump to not_smi.
        __ mov(eax, edi);
        break;
      default:
        // No other operators jump to use_fp_on_smis.
        break;
    }
    __ jmp(&not_smis);
  } else {
    ASSERT(allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS);
    switch (op_) {
      case Token::SHL:
      case Token::SHR: {
        Comment perform_float(masm, "-- Perform float operation on smis");
        __ bind(&use_fp_on_smis);
        // Result we want is in left == edx, so we can put the allocated heap
        // number in eax.
        __ AllocateHeapNumber(eax, ecx, ebx, slow);
        // Store the result in the HeapNumber and return.
        // It's OK to overwrite the arguments on the stack because we
        // are about to return.
        if (op_ == Token::SHR) {
          // SHR result is an unsigned int32: load it as a 64-bit integer
          // (with a zero high word) so the value is not sign-extended.
          __ mov(Operand(esp, 1 * kPointerSize), left);
          __ mov(Operand(esp, 2 * kPointerSize), Immediate(0));
          __ fild_d(Operand(esp, 1 * kPointerSize));
          __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
        } else {
          ASSERT_EQ(Token::SHL, op_);
          if (CpuFeatures::IsSupported(SSE2)) {
            CpuFeatures::Scope use_sse2(SSE2);
            __ cvtsi2sd(xmm0, Operand(left));
            __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
          } else {
            __ mov(Operand(esp, 1 * kPointerSize), left);
            __ fild_s(Operand(esp, 1 * kPointerSize));
            __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
          }
        }
        __ ret(2 * kPointerSize);
        break;
      }

      case Token::ADD:
      case Token::SUB:
      case Token::MUL:
      case Token::DIV: {
        Comment perform_float(masm, "-- Perform float operation on smis");
        __ bind(&use_fp_on_smis);
        // Restore arguments to edx, eax.
        switch (op_) {
          case Token::ADD:
            // Revert right = right + left.
            __ sub(right, Operand(left));
            break;
          case Token::SUB:
            // Revert left = left - right.
            __ add(left, Operand(right));
            break;
          case Token::MUL:
            // Right was clobbered but a copy is in ebx.
            __ mov(right, ebx);
            break;
          case Token::DIV:
            // Left was clobbered but a copy is in edi. Right is in ebx for
            // division.
            __ mov(edx, edi);
            __ mov(eax, right);
            break;
          default: UNREACHABLE();
            break;
        }
        __ AllocateHeapNumber(ecx, ebx, no_reg, slow);
        if (CpuFeatures::IsSupported(SSE2)) {
          CpuFeatures::Scope use_sse2(SSE2);
          FloatingPointHelper::LoadSSE2Smis(masm, ebx);
          switch (op_) {
            case Token::ADD: __ addsd(xmm0, xmm1); break;
            case Token::SUB: __ subsd(xmm0, xmm1); break;
            case Token::MUL: __ mulsd(xmm0, xmm1); break;
            case Token::DIV: __ divsd(xmm0, xmm1); break;
            default: UNREACHABLE();
          }
          __ movdbl(FieldOperand(ecx, HeapNumber::kValueOffset), xmm0);
        } else {  // SSE2 not available, use FPU.
          FloatingPointHelper::LoadFloatSmis(masm, ebx);
          switch (op_) {
            case Token::ADD: __ faddp(1); break;
            case Token::SUB: __ fsubp(1); break;
            case Token::MUL: __ fmulp(1); break;
            case Token::DIV: __ fdivp(1); break;
            default: UNREACHABLE();
          }
          __ fstp_d(FieldOperand(ecx, HeapNumber::kValueOffset));
        }
        __ mov(eax, ecx);
        __ ret(0);
        break;
      }

      default:
        break;
    }
  }

  // 7. Non-smi operands, fall out to the non-smi code with the operands in
  // edx and eax.
  Comment done_comment(masm, "-- Enter non-smi code");
  __ bind(&not_smis);
  switch (op_) {
    case Token::BIT_OR:
    case Token::SHL:
    case Token::SAR:
    case Token::SHR:
      // Right operand is saved in ecx and eax was destroyed by the smi
      // check.
      __ mov(eax, ecx);
      break;

    case Token::DIV:
    case Token::MOD:
      // Operands are in eax, ebx at this point.
      __ mov(edx, eax);
      __ mov(eax, ebx);
      break;

    default:
      break;
  }
}
1370
1371
// Stub body for operands recorded as smis.
//
// For MOD and the bitwise/shift ops the operands are first pushed on the
// stack, since both GenerateSmiCode and the saved-args type transition
// consume them from there. If the recorded result type is still smi (or
// uninitialized), overflowing results are not boxed inline — the stub
// transitions instead so the IC can record the wider result type.
void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
  Label call_runtime;

  switch (op_) {
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV:
      break;
    case Token::MOD:
    case Token::BIT_OR:
    case Token::BIT_AND:
    case Token::BIT_XOR:
    case Token::SAR:
    case Token::SHL:
    case Token::SHR:
      GenerateRegisterArgsPush(masm);
      break;
    default:
      UNREACHABLE();
  }

  if (result_type_ == BinaryOpIC::UNINITIALIZED ||
      result_type_ == BinaryOpIC::SMI) {
    GenerateSmiCode(masm, &call_runtime, NO_HEAPNUMBER_RESULTS);
  } else {
    GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);
  }
  // Hard cases transition the stub; the saved-args variant is used for the
  // ops whose arguments were pushed above.
  __ bind(&call_runtime);
  switch (op_) {
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV:
      GenerateTypeTransition(masm);
      break;
    case Token::MOD:
    case Token::BIT_OR:
    case Token::BIT_AND:
    case Token::BIT_XOR:
    case Token::SAR:
    case Token::SHL:
    case Token::SHR:
      GenerateTypeTransitionWithSavedArgs(masm);
      break;
    default:
      UNREACHABLE();
  }
}
1421
1422
// ADD with at least one string operand recorded by the IC.
void BinaryOpStub::GenerateStringStub(MacroAssembler* masm) {
  ASSERT(operands_type_ == BinaryOpIC::STRING);
  ASSERT(op_ == Token::ADD);
  // Try to add arguments as strings, otherwise, transition to the generic
  // BinaryOpIC type.
  GenerateAddStrings(masm);
  GenerateTypeTransition(masm);
}
1431
1432
// ADD where both operands were recorded as strings: tail-calls the string
// add stub when both operands (edx, eax) really are strings, otherwise
// transitions the stub.
void BinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) {
  Label call_runtime;
  ASSERT(operands_type_ == BinaryOpIC::BOTH_STRING);
  ASSERT(op_ == Token::ADD);
  // If both arguments are strings, call the string add stub.
  // Otherwise, do a transition.

  // Registers containing left and right operands respectively.
  Register left = edx;
  Register right = eax;

  // Test if left operand is a string.
  __ JumpIfSmi(left, &call_runtime, Label::kNear);
  __ CmpObjectType(left, FIRST_NONSTRING_TYPE, ecx);
  __ j(above_equal, &call_runtime, Label::kNear);

  // Test if right operand is a string.
  __ JumpIfSmi(right, &call_runtime, Label::kNear);
  __ CmpObjectType(right, FIRST_NONSTRING_TYPE, ecx);
  __ j(above_equal, &call_runtime, Label::kNear);

  // The string add stub skips its own operand checks — they were just done.
  StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
  GenerateRegisterArgsPush(masm);
  __ TailCallStub(&string_add_stub);

  __ bind(&call_runtime);
  GenerateTypeTransition(masm);
}
1461
1462
// Stub body for operands whose recorded type is BinaryOpIC::INT32.
//
// ADD/SUB/MUL/DIV are performed in floating point; if an operand is not a
// number, or (while the recorded result type is still INT32) the result is
// not exactly representable as an int32, the stub transitions. The
// bitwise/shift ops convert both operands to untagged integers and
// transition when that fails. MOD and any allocation-failure or hard SHR
// cases fall through to the runtime via the JavaScript builtins.
void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
  Label call_runtime;
  ASSERT(operands_type_ == BinaryOpIC::INT32);

  // Floating point case.
  switch (op_) {
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV: {
      Label not_floats;
      Label not_int32;
      if (CpuFeatures::IsSupported(SSE2)) {
        CpuFeatures::Scope use_sse2(SSE2);
        FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
        FloatingPointHelper::CheckSSE2OperandsAreInt32(masm, &not_int32, ecx);
        switch (op_) {
          case Token::ADD: __ addsd(xmm0, xmm1); break;
          case Token::SUB: __ subsd(xmm0, xmm1); break;
          case Token::MUL: __ mulsd(xmm0, xmm1); break;
          case Token::DIV: __ divsd(xmm0, xmm1); break;
          default: UNREACHABLE();
        }
        // Check result type if it is currently Int32.
        // Round-trip through int32 and compare; a mismatch (or NaN, which
        // sets the carry flag on ucomisd) means the result is not an int32.
        if (result_type_ <= BinaryOpIC::INT32) {
          __ cvttsd2si(ecx, Operand(xmm0));
          __ cvtsi2sd(xmm2, Operand(ecx));
          __ ucomisd(xmm0, xmm2);
          __ j(not_zero, &not_int32);
          __ j(carry, &not_int32);
        }
        GenerateHeapResultAllocation(masm, &call_runtime);
        __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
        __ ret(0);
      } else {  // SSE2 not available, use FPU.
        FloatingPointHelper::CheckFloatOperands(masm, &not_floats, ebx);
        FloatingPointHelper::LoadFloatOperands(
            masm,
            ecx,
            FloatingPointHelper::ARGS_IN_REGISTERS);
        FloatingPointHelper::CheckFloatOperandsAreInt32(masm, &not_int32);
        switch (op_) {
          case Token::ADD: __ faddp(1); break;
          case Token::SUB: __ fsubp(1); break;
          case Token::MUL: __ fmulp(1); break;
          case Token::DIV: __ fdivp(1); break;
          default: UNREACHABLE();
        }
        Label after_alloc_failure;
        GenerateHeapResultAllocation(masm, &after_alloc_failure);
        __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
        __ ret(0);
        __ bind(&after_alloc_failure);
        // Drop the pending x87 result before going to the runtime.
        __ ffree();
        __ jmp(&call_runtime);
      }

      __ bind(&not_floats);
      __ bind(&not_int32);
      GenerateTypeTransition(masm);
      break;
    }

    case Token::MOD: {
      // For MOD we go directly to runtime in the non-smi case.
      break;
    }
    case Token::BIT_OR:
    case Token::BIT_AND:
    case Token::BIT_XOR:
    case Token::SAR:
    case Token::SHL:
    case Token::SHR: {
      GenerateRegisterArgsPush(masm);
      Label not_floats;
      Label not_int32;
      Label non_smi_result;
      /* {
        CpuFeatures::Scope use_sse2(SSE2);
        FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
        FloatingPointHelper::CheckSSE2OperandsAreInt32(masm, &not_int32, ecx);
      }*/
      // Left operand ends up untagged in eax, right operand in ecx.
      FloatingPointHelper::LoadUnknownsAsIntegers(masm,
                                                  use_sse3_,
                                                  &not_floats);
      FloatingPointHelper::CheckLoadedIntegersWereInt32(masm, use_sse3_,
                                                        &not_int32);
      switch (op_) {
        case Token::BIT_OR: __ or_(eax, Operand(ecx)); break;
        case Token::BIT_AND: __ and_(eax, Operand(ecx)); break;
        case Token::BIT_XOR: __ xor_(eax, Operand(ecx)); break;
        case Token::SAR: __ sar_cl(eax); break;
        case Token::SHL: __ shl_cl(eax); break;
        case Token::SHR: __ shr_cl(eax); break;
        default: UNREACHABLE();
      }
      if (op_ == Token::SHR) {
        // Check if result is non-negative and fits in a smi.
        __ test(eax, Immediate(0xc0000000));
        __ j(not_zero, &call_runtime);
      } else {
        // Check if result fits in a smi.
        __ cmp(eax, 0xc0000000);
        __ j(negative, &non_smi_result, Label::kNear);
      }
      // Tag smi result and return.
      __ SmiTag(eax);
      __ ret(2 * kPointerSize);  // Drop two pushed arguments from the stack.

      // All ops except SHR return a signed int32 that we load in
      // a HeapNumber.
      if (op_ != Token::SHR) {
        __ bind(&non_smi_result);
        // Allocate a heap number if needed.
        __ mov(ebx, Operand(eax));  // ebx: result
        Label skip_allocation;
        switch (mode_) {
          case OVERWRITE_LEFT:
          case OVERWRITE_RIGHT:
            // If the operand was an object, we skip the
            // allocation of a heap number.
            __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
                                1 * kPointerSize : 2 * kPointerSize));
            __ JumpIfNotSmi(eax, &skip_allocation, Label::kNear);
            // Fall through!
          case NO_OVERWRITE:
            __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
            __ bind(&skip_allocation);
            break;
          default: UNREACHABLE();
        }
        // Store the result in the HeapNumber and return.
        if (CpuFeatures::IsSupported(SSE2)) {
          CpuFeatures::Scope use_sse2(SSE2);
          __ cvtsi2sd(xmm0, Operand(ebx));
          __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
        } else {
          __ mov(Operand(esp, 1 * kPointerSize), ebx);
          __ fild_s(Operand(esp, 1 * kPointerSize));
          __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
        }
        __ ret(2 * kPointerSize);  // Drop two pushed arguments from the stack.
      }

      __ bind(&not_floats);
      __ bind(&not_int32);
      GenerateTypeTransitionWithSavedArgs(masm);
      break;
    }
    default: UNREACHABLE(); break;
  }

  // If an allocation fails, or SHR or MOD hit a hard case,
  // use the runtime system to get the correct result.
  __ bind(&call_runtime);

  switch (op_) {
    case Token::ADD:
      GenerateRegisterArgsPush(masm);
      __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
      break;
    case Token::SUB:
      GenerateRegisterArgsPush(masm);
      __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
      break;
    case Token::MUL:
      GenerateRegisterArgsPush(masm);
      __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
      break;
    case Token::DIV:
      GenerateRegisterArgsPush(masm);
      __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
      break;
    case Token::MOD:
      GenerateRegisterArgsPush(masm);
      __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
      break;
    case Token::BIT_OR:
      __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
      break;
    case Token::BIT_AND:
      __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
      break;
    case Token::BIT_XOR:
      __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
      break;
    case Token::SAR:
      __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
      break;
    case Token::SHL:
      __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
      break;
    case Token::SHR:
      __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}
1662
1663
// Specialized stub entry for the case where at least one operand was the
// oddball 'undefined'. Replaces undefined operands with their numeric
// conversion (0 for bit ops, NaN otherwise) and then falls through to the
// heap-number stub. For ADD, string concatenation is attempted first since
// ADD is the only operation that does not apply ToNumber to its operands.
// Operands arrive in edx (left) and eax (right).
void BinaryOpStub::GenerateOddballStub(MacroAssembler* masm) {
  if (op_ == Token::ADD) {
    // Handle string addition here, because it is the only operation
    // that does not do a ToNumber conversion on the operands.
    // GenerateAddStrings falls through if neither operand is a string.
    GenerateAddStrings(masm);
  }

  Factory* factory = masm->isolate()->factory();

  // Convert odd ball arguments to numbers.
  Label check, done;
  __ cmp(edx, factory->undefined_value());
  __ j(not_equal, &check, Label::kNear);
  if (Token::IsBitOp(op_)) {
    // Bit ops use ToInt32 semantics: undefined converts to 0 (as a smi).
    __ xor_(edx, Operand(edx));
  } else {
    // Other ops: undefined converts to NaN.
    __ mov(edx, Immediate(factory->nan_value()));
  }
  __ jmp(&done, Label::kNear);
  __ bind(&check);
  __ cmp(eax, factory->undefined_value());
  __ j(not_equal, &done, Label::kNear);
  if (Token::IsBitOp(op_)) {
    __ xor_(eax, Operand(eax));
  } else {
    __ mov(eax, Immediate(factory->nan_value()));
  }
  __ bind(&done);

  // With oddballs normalized to numbers, the heap-number stub handles the
  // rest of the operation.
  GenerateHeapNumberStub(masm);
}
1695
1696
// Stub specialization for operands known (from type feedback) to be heap
// numbers. Operands arrive in edx (left) and eax (right). On a type
// mismatch the stub transitions to a more general state; allocation
// failures and hard cases fall back to the runtime via builtins.
void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
  Label call_runtime;

  // Floating point case.
  switch (op_) {
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV: {
      Label not_floats;
      if (CpuFeatures::IsSupported(SSE2)) {
        CpuFeatures::Scope use_sse2(SSE2);
        // Loads left into xmm0 and right into xmm1, or jumps to not_floats.
        FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);

        switch (op_) {
          case Token::ADD: __ addsd(xmm0, xmm1); break;
          case Token::SUB: __ subsd(xmm0, xmm1); break;
          case Token::MUL: __ mulsd(xmm0, xmm1); break;
          case Token::DIV: __ divsd(xmm0, xmm1); break;
          default: UNREACHABLE();
        }
        // Result HeapNumber ends up in eax (possibly reusing an operand).
        GenerateHeapResultAllocation(masm, &call_runtime);
        __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
        __ ret(0);
      } else {  // SSE2 not available, use FPU.
        FloatingPointHelper::CheckFloatOperands(masm, &not_floats, ebx);
        FloatingPointHelper::LoadFloatOperands(
            masm,
            ecx,
            FloatingPointHelper::ARGS_IN_REGISTERS);
        switch (op_) {
          case Token::ADD: __ faddp(1); break;
          case Token::SUB: __ fsubp(1); break;
          case Token::MUL: __ fmulp(1); break;
          case Token::DIV: __ fdivp(1); break;
          default: UNREACHABLE();
        }
        Label after_alloc_failure;
        GenerateHeapResultAllocation(masm, &after_alloc_failure);
        __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
        __ ret(0);
        __ bind(&after_alloc_failure);
        // Clean up the FPU stack before going to the runtime.
        __ ffree();
        __ jmp(&call_runtime);
      }

      // One of the operands was not a heap number after all: transition
      // the stub to a more general type state.
      __ bind(&not_floats);
      GenerateTypeTransition(masm);
      break;
    }

    case Token::MOD: {
      // For MOD we go directly to runtime in the non-smi case.
      break;
    }
    case Token::BIT_OR:
    case Token::BIT_AND:
    case Token::BIT_XOR:
    case Token::SAR:
    case Token::SHL:
    case Token::SHR: {
      // Save the register arguments on the stack; the type transition path
      // below expects them there, and the smi-tagging return drops them.
      GenerateRegisterArgsPush(masm);
      Label not_floats;
      Label non_smi_result;
      // Converts the operands to int32s in eax and ecx.
      FloatingPointHelper::LoadUnknownsAsIntegers(masm,
                                                  use_sse3_,
                                                  &not_floats);
      switch (op_) {
        case Token::BIT_OR: __ or_(eax, Operand(ecx)); break;
        case Token::BIT_AND: __ and_(eax, Operand(ecx)); break;
        case Token::BIT_XOR: __ xor_(eax, Operand(ecx)); break;
        case Token::SAR: __ sar_cl(eax); break;
        case Token::SHL: __ shl_cl(eax); break;
        case Token::SHR: __ shr_cl(eax); break;
        default: UNREACHABLE();
      }
      if (op_ == Token::SHR) {
        // Check if result is non-negative and fits in a smi. SHR yields an
        // unsigned value, so the top two bits must be clear for the 31-bit
        // smi range (kSmiTagSize == 1).
        __ test(eax, Immediate(0xc0000000));
        __ j(not_zero, &call_runtime);
      } else {
        // Check if result fits in a smi.
        __ cmp(eax, 0xc0000000);
        __ j(negative, &non_smi_result, Label::kNear);
      }
      // Tag smi result and return.
      __ SmiTag(eax);
      __ ret(2 * kPointerSize);  // Drop two pushed arguments from the stack.

      // All ops except SHR return a signed int32 that we load in
      // a HeapNumber.
      if (op_ != Token::SHR) {
        __ bind(&non_smi_result);
        // Allocate a heap number if needed.
        __ mov(ebx, Operand(eax));  // ebx: result
        Label skip_allocation;
        switch (mode_) {
          case OVERWRITE_LEFT:
          case OVERWRITE_RIGHT:
            // If the operand was an object, we skip the
            // allocation of a heap number.
            __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
                                1 * kPointerSize : 2 * kPointerSize));
            __ JumpIfNotSmi(eax, &skip_allocation, Label::kNear);
            // Fall through!
          case NO_OVERWRITE:
            __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
            __ bind(&skip_allocation);
            break;
          default: UNREACHABLE();
        }
        // Store the result in the HeapNumber and return.
        if (CpuFeatures::IsSupported(SSE2)) {
          CpuFeatures::Scope use_sse2(SSE2);
          __ cvtsi2sd(xmm0, Operand(ebx));
          __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
        } else {
          // No SSE2: bounce the int32 through memory into the FPU.
          __ mov(Operand(esp, 1 * kPointerSize), ebx);
          __ fild_s(Operand(esp, 1 * kPointerSize));
          __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
        }
        __ ret(2 * kPointerSize);  // Drop two pushed arguments from the stack.
      }

      __ bind(&not_floats);
      // Arguments were already pushed above, so use the saved-args variant.
      GenerateTypeTransitionWithSavedArgs(masm);
      break;
    }
    default: UNREACHABLE(); break;
  }

  // If an allocation fails, or SHR or MOD hit a hard case,
  // use the runtime system to get the correct result.
  __ bind(&call_runtime);

  switch (op_) {
    case Token::ADD:
      GenerateRegisterArgsPush(masm);
      __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
      break;
    case Token::SUB:
      GenerateRegisterArgsPush(masm);
      __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
      break;
    case Token::MUL:
      GenerateRegisterArgsPush(masm);
      __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
      break;
    case Token::DIV:
      GenerateRegisterArgsPush(masm);
      __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
      break;
    case Token::MOD:
      GenerateRegisterArgsPush(masm);
      __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
      break;
    // The bit ops already pushed their arguments above.
    case Token::BIT_OR:
      __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
      break;
    case Token::BIT_AND:
      __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
      break;
    case Token::BIT_XOR:
      __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
      break;
    case Token::SAR:
      __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
      break;
    case Token::SHL:
      __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
      break;
    case Token::SHR:
      __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}
1875
1876
// Fully generic stub state: handles smis inline, then heap numbers, and
// finally falls back to the builtins for everything else. Unlike the
// typed states above it never transitions; it is the end of the line.
// Operands arrive in edx (left) and eax (right).
void BinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
  Label call_runtime;

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->generic_binary_stub_calls(), 1);

  // MOD and the bit/shift ops expect their arguments on the stack for the
  // paths below; the arithmetic ops push lazily at the runtime call.
  switch (op_) {
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV:
      break;
    case Token::MOD:
    case Token::BIT_OR:
    case Token::BIT_AND:
    case Token::BIT_XOR:
    case Token::SAR:
    case Token::SHL:
    case Token::SHR:
      GenerateRegisterArgsPush(masm);
      break;
    default:
      UNREACHABLE();
  }

  // Fast path: both operands are smis.
  GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);

  // Floating point case.
  switch (op_) {
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV: {
      Label not_floats;
      if (CpuFeatures::IsSupported(SSE2)) {
        CpuFeatures::Scope use_sse2(SSE2);
        // Loads left into xmm0 and right into xmm1, or jumps to not_floats.
        FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);

        switch (op_) {
          case Token::ADD: __ addsd(xmm0, xmm1); break;
          case Token::SUB: __ subsd(xmm0, xmm1); break;
          case Token::MUL: __ mulsd(xmm0, xmm1); break;
          case Token::DIV: __ divsd(xmm0, xmm1); break;
          default: UNREACHABLE();
        }
        GenerateHeapResultAllocation(masm, &call_runtime);
        __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
        __ ret(0);
      } else {  // SSE2 not available, use FPU.
        FloatingPointHelper::CheckFloatOperands(masm, &not_floats, ebx);
        FloatingPointHelper::LoadFloatOperands(
            masm,
            ecx,
            FloatingPointHelper::ARGS_IN_REGISTERS);
        switch (op_) {
          case Token::ADD: __ faddp(1); break;
          case Token::SUB: __ fsubp(1); break;
          case Token::MUL: __ fmulp(1); break;
          case Token::DIV: __ fdivp(1); break;
          default: UNREACHABLE();
        }
        Label after_alloc_failure;
        GenerateHeapResultAllocation(masm, &after_alloc_failure);
        __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
        __ ret(0);
        __ bind(&after_alloc_failure);
        // Clean up the FPU stack before going to the runtime.
        __ ffree();
        __ jmp(&call_runtime);
      }
      // Not both numbers: fall through to the runtime call below.
      __ bind(&not_floats);
      break;
    }
    case Token::MOD: {
      // For MOD we go directly to runtime in the non-smi case.
      break;
    }
    case Token::BIT_OR:
    case Token::BIT_AND:
    case Token::BIT_XOR:
    case Token::SAR:
    case Token::SHL:
    case Token::SHR: {
      Label non_smi_result;
      // Converts the operands to int32s in eax and ecx; any failure goes
      // straight to the runtime (no type transition in the generic state).
      FloatingPointHelper::LoadUnknownsAsIntegers(masm,
                                                  use_sse3_,
                                                  &call_runtime);
      switch (op_) {
        case Token::BIT_OR: __ or_(eax, Operand(ecx)); break;
        case Token::BIT_AND: __ and_(eax, Operand(ecx)); break;
        case Token::BIT_XOR: __ xor_(eax, Operand(ecx)); break;
        case Token::SAR: __ sar_cl(eax); break;
        case Token::SHL: __ shl_cl(eax); break;
        case Token::SHR: __ shr_cl(eax); break;
        default: UNREACHABLE();
      }
      if (op_ == Token::SHR) {
        // Check if result is non-negative and fits in a smi. SHR yields an
        // unsigned value, so the top two bits must be clear for the 31-bit
        // smi range.
        __ test(eax, Immediate(0xc0000000));
        __ j(not_zero, &call_runtime);
      } else {
        // Check if result fits in a smi.
        __ cmp(eax, 0xc0000000);
        __ j(negative, &non_smi_result, Label::kNear);
      }
      // Tag smi result and return.
      __ SmiTag(eax);
      __ ret(2 * kPointerSize);  // Drop the arguments from the stack.

      // All ops except SHR return a signed int32 that we load in
      // a HeapNumber.
      if (op_ != Token::SHR) {
        __ bind(&non_smi_result);
        // Allocate a heap number if needed.
        __ mov(ebx, Operand(eax));  // ebx: result
        Label skip_allocation;
        switch (mode_) {
          case OVERWRITE_LEFT:
          case OVERWRITE_RIGHT:
            // If the operand was an object, we skip the
            // allocation of a heap number.
            __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
                                1 * kPointerSize : 2 * kPointerSize));
            __ JumpIfNotSmi(eax, &skip_allocation, Label::kNear);
            // Fall through!
          case NO_OVERWRITE:
            __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
            __ bind(&skip_allocation);
            break;
          default: UNREACHABLE();
        }
        // Store the result in the HeapNumber and return.
        if (CpuFeatures::IsSupported(SSE2)) {
          CpuFeatures::Scope use_sse2(SSE2);
          __ cvtsi2sd(xmm0, Operand(ebx));
          __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
        } else {
          // No SSE2: bounce the int32 through memory into the FPU.
          __ mov(Operand(esp, 1 * kPointerSize), ebx);
          __ fild_s(Operand(esp, 1 * kPointerSize));
          __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
        }
        __ ret(2 * kPointerSize);
      }
      break;
    }
    default: UNREACHABLE(); break;
  }

  // If all else fails, use the runtime system to get the correct
  // result.
  __ bind(&call_runtime);
  switch (op_) {
    case Token::ADD: {
      // ADD may still be string concatenation; try that before ToNumber.
      GenerateAddStrings(masm);
      GenerateRegisterArgsPush(masm);
      __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
      break;
    }
    case Token::SUB:
      GenerateRegisterArgsPush(masm);
      __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
      break;
    case Token::MUL:
      GenerateRegisterArgsPush(masm);
      __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
      break;
    case Token::DIV:
      GenerateRegisterArgsPush(masm);
      __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
      break;
    // MOD and the bit ops pushed their arguments at the top of the stub.
    case Token::MOD:
      __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
      break;
    case Token::BIT_OR:
      __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
      break;
    case Token::BIT_AND:
      __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
      break;
    case Token::BIT_XOR:
      __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
      break;
    case Token::SAR:
      __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
      break;
    case Token::SHL:
      __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
      break;
    case Token::SHR:
      __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}
2071
2072
// Attempts string concatenation for ADD. If either operand is a string,
// tail-calls the matching StringAddStub (which never returns here). If
// neither operand is a string, execution falls through to whatever code
// follows the call site.
void BinaryOpStub::GenerateAddStrings(MacroAssembler* masm) {
  ASSERT(op_ == Token::ADD);
  Label left_not_string, call_runtime;

  // Registers containing left and right operands respectively.
  Register left = edx;
  Register right = eax;

  // Test if left operand is a string.
  __ JumpIfSmi(left, &left_not_string, Label::kNear);
  __ CmpObjectType(left, FIRST_NONSTRING_TYPE, ecx);
  __ j(above_equal, &left_not_string, Label::kNear);

  // Left is a string: the stub only needs to check the right operand.
  StringAddStub string_add_left_stub(NO_STRING_CHECK_LEFT_IN_STUB);
  GenerateRegisterArgsPush(masm);
  __ TailCallStub(&string_add_left_stub);

  // Left operand is not a string, test right.
  __ bind(&left_not_string);
  __ JumpIfSmi(right, &call_runtime, Label::kNear);
  __ CmpObjectType(right, FIRST_NONSTRING_TYPE, ecx);
  __ j(above_equal, &call_runtime, Label::kNear);

  StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB);
  GenerateRegisterArgsPush(masm);
  __ TailCallStub(&string_add_right_stub);

  // Neither argument is a string.
  __ bind(&call_runtime);
}
2103
2104
// Produces a HeapNumber in eax to hold the result. Depending on mode_, an
// operand that is already a heap object is reused in place of a fresh
// allocation (edx for OVERWRITE_LEFT, eax for OVERWRITE_RIGHT). Jumps to
// alloc_failure if a fresh allocation is needed and fails; in that case
// eax and edx still hold the original operands for the runtime call.
void BinaryOpStub::GenerateHeapResultAllocation(
    MacroAssembler* masm,
    Label* alloc_failure) {
  Label skip_allocation;
  OverwriteMode mode = mode_;
  switch (mode) {
    case OVERWRITE_LEFT: {
      // If the argument in edx is already an object, we skip the
      // allocation of a heap number.
      __ JumpIfNotSmi(edx, &skip_allocation, Label::kNear);
      // Allocate a heap number for the result. Keep eax and edx intact
      // for the possible runtime call.
      __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure);
      // Now edx can be overwritten losing one of the arguments as we are
      // now done and will not need it any more.
      __ mov(edx, Operand(ebx));
      __ bind(&skip_allocation);
      // Use object in edx as a result holder
      __ mov(eax, Operand(edx));
      break;
    }
    case OVERWRITE_RIGHT:
      // If the argument in eax is already an object, we skip the
      // allocation of a heap number.
      __ JumpIfNotSmi(eax, &skip_allocation, Label::kNear);
      // Fall through!
    case NO_OVERWRITE:
      // Allocate a heap number for the result. Keep eax and edx intact
      // for the possible runtime call.
      __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure);
      // Now eax can be overwritten losing one of the arguments as we are
      // now done and will not need it any more.
      __ mov(eax, ebx);
      __ bind(&skip_allocation);
      break;
    default: UNREACHABLE();
  }
}
2143
2144
// Pushes the two register operands (edx, eax) onto the stack beneath the
// return address, leaving the return address on top so a subsequent ret
// or tail call still works.
void BinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
  __ pop(ecx);   // Temporarily hold the return address.
  __ push(edx);  // Left operand.
  __ push(eax);  // Right operand.
  __ push(ecx);  // Restore the return address on top.
}
2151
2152
// Computes a transcendental function (sin/cos/log, per type_) with a
// per-isolate memoization cache keyed on the raw 64-bit double bits.
// On a cache hit the previously allocated HeapNumber result is returned;
// on a miss the value is computed (GenerateOperation) and the cache entry
// is updated. Hard cases (non-number input, allocation failure) go to the
// runtime.
void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
  // TAGGED case:
  //   Input:
  //     esp[4]: tagged number input argument (should be number).
  //     esp[0]: return address.
  //   Output:
  //     eax: tagged double result.
  // UNTAGGED case:
  //   Input::
  //     esp[0]: return address.
  //     xmm1: untagged double input argument
  //   Output:
  //     xmm1: untagged double result.

  Label runtime_call;
  Label runtime_call_clear_stack;
  Label skip_cache;
  const bool tagged = (argument_type_ == TAGGED);
  if (tagged) {
    // Test that eax is a number.
    Label input_not_smi;
    Label loaded;
    __ mov(eax, Operand(esp, kPointerSize));
    __ JumpIfNotSmi(eax, &input_not_smi, Label::kNear);
    // Input is a smi. Untag and load it onto the FPU stack.
    // Then load the low and high words of the double into ebx, edx.
    STATIC_ASSERT(kSmiTagSize == 1);
    __ sar(eax, 1);
    __ sub(Operand(esp), Immediate(2 * kPointerSize));
    __ mov(Operand(esp, 0), eax);
    __ fild_s(Operand(esp, 0));
    __ fst_d(Operand(esp, 0));
    __ pop(edx);
    __ pop(ebx);
    __ jmp(&loaded, Label::kNear);
    __ bind(&input_not_smi);
    // Check if input is a HeapNumber.
    __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
    Factory* factory = masm->isolate()->factory();
    __ cmp(Operand(ebx), Immediate(factory->heap_number_map()));
    __ j(not_equal, &runtime_call);
    // Input is a HeapNumber. Push it on the FPU stack and load its
    // low and high words into ebx, edx.
    __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
    __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset));
    __ mov(ebx, FieldOperand(eax, HeapNumber::kMantissaOffset));

    __ bind(&loaded);
  } else {  // UNTAGGED.
    // Extract the two 32-bit halves of the double in xmm1 into edx:ebx.
    if (CpuFeatures::IsSupported(SSE4_1)) {
      CpuFeatures::Scope sse4_scope(SSE4_1);
      __ pextrd(Operand(edx), xmm1, 0x1);  // copy xmm1[63..32] to edx.
    } else {
      __ pshufd(xmm0, xmm1, 0x1);
      __ movd(Operand(edx), xmm0);
    }
    __ movd(Operand(ebx), xmm1);
  }

  // ST[0] or xmm1 == double value
  // ebx = low 32 bits of double value
  // edx = high 32 bits of double value
  // Compute hash (the shifts are arithmetic):
  //  h = (low ^ high); h ^= h >> 16; h ^= h >> 8; h = h & (cacheSize - 1);
  __ mov(ecx, ebx);
  __ xor_(ecx, Operand(edx));
  __ mov(eax, ecx);
  __ sar(eax, 16);
  __ xor_(ecx, Operand(eax));
  __ mov(eax, ecx);
  __ sar(eax, 8);
  __ xor_(ecx, Operand(eax));
  ASSERT(IsPowerOf2(TranscendentalCache::SubCache::kCacheSize));
  __ and_(Operand(ecx),
          Immediate(TranscendentalCache::SubCache::kCacheSize - 1));

  // ST[0] or xmm1 == double value.
  // ebx = low 32 bits of double value.
  // edx = high 32 bits of double value.
  // ecx = TranscendentalCache::hash(double value).
  ExternalReference cache_array =
      ExternalReference::transcendental_cache_array_address(masm->isolate());
  __ mov(eax, Immediate(cache_array));
  int cache_array_index =
      type_ * sizeof(masm->isolate()->transcendental_cache()->caches_[0]);
  __ mov(eax, Operand(eax, cache_array_index));
  // Eax points to the cache for the type type_.
  // If NULL, the cache hasn't been initialized yet, so go through runtime.
  __ test(eax, Operand(eax));
  __ j(zero, &runtime_call_clear_stack);
#ifdef DEBUG
  // Check that the layout of cache elements match expectations.
  { TranscendentalCache::SubCache::Element test_elem[2];
    char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
    char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
    char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
    char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
    char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
    CHECK_EQ(12, elem2_start - elem_start);  // Two uint_32's and a pointer.
    CHECK_EQ(0, elem_in0 - elem_start);
    CHECK_EQ(kIntSize, elem_in1 - elem_start);
    CHECK_EQ(2 * kIntSize, elem_out - elem_start);
  }
#endif
  // Find the address of the ecx'th entry in the cache, i.e., &eax[ecx*12].
  __ lea(ecx, Operand(ecx, ecx, times_2, 0));
  __ lea(ecx, Operand(eax, ecx, times_4, 0));
  // Check if cache matches: Double value is stored in uint32_t[2] array.
  Label cache_miss;
  __ cmp(ebx, Operand(ecx, 0));
  __ j(not_equal, &cache_miss, Label::kNear);
  __ cmp(edx, Operand(ecx, kIntSize));
  __ j(not_equal, &cache_miss, Label::kNear);
  // Cache hit!
  __ mov(eax, Operand(ecx, 2 * kIntSize));
  if (tagged) {
    // Pop the unused input off the FPU stack before returning.
    __ fstp(0);
    __ ret(kPointerSize);
  } else {  // UNTAGGED.
    __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
    __ Ret();
  }

  __ bind(&cache_miss);
  // Update cache with new value.
  // We are short on registers, so use no_reg as scratch.
  // This gives slightly larger code.
  if (tagged) {
    __ AllocateHeapNumber(eax, edi, no_reg, &runtime_call_clear_stack);
  } else {  // UNTAGGED.
    __ AllocateHeapNumber(eax, edi, no_reg, &skip_cache);
    // Move the input from xmm1 onto the FPU stack for GenerateOperation.
    __ sub(Operand(esp), Immediate(kDoubleSize));
    __ movdbl(Operand(esp, 0), xmm1);
    __ fld_d(Operand(esp, 0));
    __ add(Operand(esp), Immediate(kDoubleSize));
  }
  GenerateOperation(masm);
  // Store the input words and the result HeapNumber in the cache entry.
  __ mov(Operand(ecx, 0), ebx);
  __ mov(Operand(ecx, kIntSize), edx);
  __ mov(Operand(ecx, 2 * kIntSize), eax);
  __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
  if (tagged) {
    __ ret(kPointerSize);
  } else {  // UNTAGGED.
    __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
    __ Ret();

    // Skip cache and return answer directly, only in untagged case.
    __ bind(&skip_cache);
    __ sub(Operand(esp), Immediate(kDoubleSize));
    __ movdbl(Operand(esp, 0), xmm1);
    __ fld_d(Operand(esp, 0));
    GenerateOperation(masm);
    __ fstp_d(Operand(esp, 0));
    __ movdbl(xmm1, Operand(esp, 0));
    __ add(Operand(esp), Immediate(kDoubleSize));
    // We return the value in xmm1 without adding it to the cache, but
    // we cause a scavenging GC so that future allocations will succeed.
    __ EnterInternalFrame();
    // Allocate an unused object bigger than a HeapNumber.
    __ push(Immediate(Smi::FromInt(2 * kDoubleSize)));
    __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
    __ LeaveInternalFrame();
    __ Ret();
  }

  // Call runtime, doing whatever allocation and cleanup is necessary.
  if (tagged) {
    __ bind(&runtime_call_clear_stack);
    __ fstp(0);
    __ bind(&runtime_call);
    ExternalReference runtime =
        ExternalReference(RuntimeFunction(), masm->isolate());
    __ TailCallExternalReference(runtime, 1, 1);
  } else {  // UNTAGGED.
    __ bind(&runtime_call_clear_stack);
    __ bind(&runtime_call);
    __ AllocateHeapNumber(eax, edi, no_reg, &skip_cache);
    __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm1);
    // The runtime function expects a tagged argument; box the double and
    // call inside an internal frame so GC can handle the allocation.
    __ EnterInternalFrame();
    __ push(eax);
    __ CallRuntime(RuntimeFunction(), 1);
    __ LeaveInternalFrame();
    __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
    __ Ret();
  }
}
2340
2341
2342Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
2343 switch (type_) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002344 case TranscendentalCache::SIN: return Runtime::kMath_sin;
2345 case TranscendentalCache::COS: return Runtime::kMath_cos;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002346 case TranscendentalCache::LOG: return Runtime::kMath_log;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002347 default:
2348 UNIMPLEMENTED();
2349 return Runtime::kAbort;
2350 }
2351}
2352
2353
// Emits the actual x87 computation for the stub's operation (type_).
// Expects the input value on top of the FPU stack; leaves the result
// there. For SIN/COS the argument is range-reduced with fprem1 because
// fsin/fcos only accept arguments in +/-2^63 and we must produce NaN for
// infinities and NaN ourselves.
void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
  // Only free register is edi.
  // Input value is on FP stack, and also in ebx/edx.
  // Input value is possibly in xmm1.
  // Address of result (a newly allocated HeapNumber) may be in eax.
  if (type_ == TranscendentalCache::SIN || type_ == TranscendentalCache::COS) {
    // Both fsin and fcos require arguments in the range +/-2^63 and
    // return NaN for infinities and NaN. They can share all code except
    // the actual fsin/fcos operation.
    Label in_range, done;
    // If argument is outside the range -2^63..2^63, fsin/cos doesn't
    // work. We must reduce it to the appropriate range.
    __ mov(edi, edx);
    __ and_(Operand(edi), Immediate(0x7ff00000));  // Exponent only.
    int supported_exponent_limit =
        (63 + HeapNumber::kExponentBias) << HeapNumber::kExponentShift;
    __ cmp(Operand(edi), Immediate(supported_exponent_limit));
    __ j(below, &in_range, Label::kNear);
    // Check for infinity and NaN. Both return NaN for sin.
    __ cmp(Operand(edi), Immediate(0x7ff00000));
    Label non_nan_result;
    __ j(not_equal, &non_nan_result, Label::kNear);
    // Input is +/-Infinity or NaN. Result is NaN.
    __ fstp(0);
    // NaN is represented by 0x7ff8000000000000.
    __ push(Immediate(0x7ff80000));
    __ push(Immediate(0));
    __ fld_d(Operand(esp, 0));
    __ add(Operand(esp), Immediate(2 * kPointerSize));
    __ jmp(&done, Label::kNear);

    __ bind(&non_nan_result);

    // Use fpmod to restrict argument to the range +/-2*PI.
    __ mov(edi, eax);  // Save eax before using fnstsw_ax.
    __ fldpi();
    __ fadd(0);  // st(0) = 2*pi.
    __ fld(1);
    // FPU Stack: input, 2*pi, input.
    {
      Label no_exceptions;
      __ fwait();
      __ fnstsw_ax();
      // Clear if Illegal Operand or Zero Division exceptions are set.
      __ test(Operand(eax), Immediate(5));
      __ j(zero, &no_exceptions, Label::kNear);
      __ fnclex();
      __ bind(&no_exceptions);
    }

    // Compute st(0) % st(1)
    {
      // fprem1 may only compute a partial remainder; the C2 status flag
      // signals that another iteration is needed.
      Label partial_remainder_loop;
      __ bind(&partial_remainder_loop);
      __ fprem1();
      __ fwait();
      __ fnstsw_ax();
      __ test(Operand(eax), Immediate(0x400 /* C2 */));
      // If C2 is set, computation only has partial result. Loop to
      // continue computation.
      __ j(not_zero, &partial_remainder_loop);
    }
    // FPU Stack: input, 2*pi, input % 2*pi
    __ fstp(2);
    __ fstp(0);
    __ mov(eax, edi);  // Restore eax (allocated HeapNumber pointer).

    // FPU Stack: input % 2*pi
    __ bind(&in_range);
    switch (type_) {
      case TranscendentalCache::SIN:
        __ fsin();
        break;
      case TranscendentalCache::COS:
        __ fcos();
        break;
      default:
        UNREACHABLE();
    }
    __ bind(&done);
  } else {
    // log(x) = log2(x) * ln(2), computed via fyl2x.
    ASSERT(type_ == TranscendentalCache::LOG);
    __ fldln2();
    __ fxch();
    __ fyl2x();
  }
}
2441
2442
// Input: edx, eax are the left and right objects of a bit op.
// Output: eax, ecx are left and right integers for a bit op.
// Converts each operand to an untagged 32-bit integer:
//   - Smis are untagged in place.
//   - Heap numbers are truncated via IntegerConvert (result in ecx).
//   - undefined converts to zero (ECMA-262, section 9.5).
// Anything else jumps to |conversion_failure|. Clobbers ebx (map scratch).
// |use_sse3| is forwarded to IntegerConvert to select the conversion path.
void FloatingPointHelper::LoadUnknownsAsIntegers(MacroAssembler* masm,
                                                 bool use_sse3,
                                                 Label* conversion_failure) {
  // Check float operands.
  Label arg1_is_object, check_undefined_arg1;
  Label arg2_is_object, check_undefined_arg2;
  Label load_arg2, done;

  // Test if arg1 is a Smi.
  __ JumpIfNotSmi(edx, &arg1_is_object, Label::kNear);

  __ SmiUntag(edx);
  __ jmp(&load_arg2);

  // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
  __ bind(&check_undefined_arg1);
  Factory* factory = masm->isolate()->factory();
  __ cmp(edx, factory->undefined_value());
  __ j(not_equal, conversion_failure);
  __ mov(edx, Immediate(0));
  __ jmp(&load_arg2);

  // arg1 is a heap object; only heap numbers (or undefined, above) convert.
  __ bind(&arg1_is_object);
  __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
  __ cmp(ebx, factory->heap_number_map());
  __ j(not_equal, &check_undefined_arg1);

  // Get the untagged integer version of the edx heap number in ecx.
  IntegerConvert(masm, edx, use_sse3, conversion_failure);
  __ mov(edx, ecx);

  // Here edx has the untagged integer, eax has a Smi or a heap number.
  __ bind(&load_arg2);

  // Test if arg2 is a Smi.
  __ JumpIfNotSmi(eax, &arg2_is_object, Label::kNear);

  __ SmiUntag(eax);
  __ mov(ecx, eax);
  __ jmp(&done);

  // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
  __ bind(&check_undefined_arg2);
  __ cmp(eax, factory->undefined_value());
  __ j(not_equal, conversion_failure);
  __ mov(ecx, Immediate(0));
  __ jmp(&done);

  // arg2 is a heap object; only heap numbers (or undefined, above) convert.
  __ bind(&arg2_is_object);
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(ebx, factory->heap_number_map());
  __ j(not_equal, &check_undefined_arg2);

  // Get the untagged integer version of the eax heap number in ecx.
  IntegerConvert(masm, eax, use_sse3, conversion_failure);
  __ bind(&done);
  // Move arg1's integer into eax; arg2's integer is already in ecx.
  __ mov(eax, edx);
}
2503
2504
Ben Murdochb0fe1622011-05-05 13:52:32 +01002505void FloatingPointHelper::CheckLoadedIntegersWereInt32(MacroAssembler* masm,
2506 bool use_sse3,
2507 Label* not_int32) {
2508 return;
2509}
2510
2511
// Loads the value held in |number| (a Smi or a HeapNumber — the caller must
// guarantee one of the two) onto the x87 FPU stack as st(0).
// Smis are untagged, spilled to the stack, and loaded with fild_s; the
// register is restored to its original (untagged-then-retagged via push/pop
// of the untagged value) state only in the sense that |number| ends up
// holding the untagged Smi — NOTE(review): SmiUntag is not undone here,
// so |number| is clobbered for the Smi case; confirm callers don't reuse
// the tagged value.
void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
                                           Register number) {
  Label load_smi, done;

  __ JumpIfSmi(number, &load_smi, Label::kNear);
  // Heap number: load the double payload directly.
  __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
  __ jmp(&done, Label::kNear);

  __ bind(&load_smi);
  __ SmiUntag(number);
  // Spill the untagged integer so fild_s can read it from memory.
  __ push(number);
  __ fild_s(Operand(esp, 0));
  __ pop(number);

  __ bind(&done);
}
2528
2529
// Loads the two binary-op operands edx (left) and eax (right) into xmm0 and
// xmm1 respectively. Each operand must be a Smi or a HeapNumber (no type
// check is performed — see the overload below for the checking version).
// Smis are retagged after conversion so the registers still hold the
// original tagged values on exit (needed for the heap number overwriting
// test in callers).
void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm) {
  Label load_smi_edx, load_eax, load_smi_eax, done;
  // Load operand in edx into xmm0.
  __ JumpIfSmi(edx, &load_smi_edx, Label::kNear);
  __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));

  __ bind(&load_eax);
  // Load operand in eax into xmm1.
  __ JumpIfSmi(eax, &load_smi_eax, Label::kNear);
  __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
  __ jmp(&done, Label::kNear);

  __ bind(&load_smi_edx);
  __ SmiUntag(edx);  // Untag smi before converting to float.
  __ cvtsi2sd(xmm0, Operand(edx));
  __ SmiTag(edx);  // Retag smi for heap number overwriting test.
  __ jmp(&load_eax);

  __ bind(&load_smi_eax);
  __ SmiUntag(eax);  // Untag smi before converting to float.
  __ cvtsi2sd(xmm1, Operand(eax));
  __ SmiTag(eax);  // Retag smi for heap number overwriting test.

  __ bind(&done);
}
2555
2556
// Checking variant of LoadSSE2Operands: loads edx (left) into xmm0 and eax
// (right) into xmm1, jumping to |not_numbers| if either operand is neither
// a Smi nor a HeapNumber. Smis are retagged after conversion so edx/eax
// still hold the original tagged values on exit.
void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
                                           Label* not_numbers) {
  Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
  // Load operand in edx into xmm0, or branch to not_numbers.
  __ JumpIfSmi(edx, &load_smi_edx, Label::kNear);
  Factory* factory = masm->isolate()->factory();
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset), factory->heap_number_map());
  __ j(not_equal, not_numbers);  // Argument in edx is not a number.
  __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
  __ bind(&load_eax);
  // Load operand in eax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(eax, &load_smi_eax, Label::kNear);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset), factory->heap_number_map());
  __ j(equal, &load_float_eax, Label::kNear);
  __ jmp(not_numbers);  // Argument in eax is not a number.
  __ bind(&load_smi_edx);
  __ SmiUntag(edx);  // Untag smi before converting to float.
  __ cvtsi2sd(xmm0, Operand(edx));
  __ SmiTag(edx);  // Retag smi for heap number overwriting test.
  __ jmp(&load_eax);
  __ bind(&load_smi_eax);
  __ SmiUntag(eax);  // Untag smi before converting to float.
  __ cvtsi2sd(xmm1, Operand(eax));
  __ SmiTag(eax);  // Retag smi for heap number overwriting test.
  __ jmp(&done, Label::kNear);
  __ bind(&load_float_eax);
  __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
  __ bind(&done);
}
2586
2587
// Loads two known-Smi operands — edx (left) and eax (right) — into xmm0 and
// xmm1 as doubles. |scratch| is clobbered; edx and eax are left untouched,
// which is why the untagging happens on the scratch copy.
void FloatingPointHelper::LoadSSE2Smis(MacroAssembler* masm,
                                       Register scratch) {
  const Register left = edx;
  const Register right = eax;
  __ mov(scratch, left);
  ASSERT(!scratch.is(right));  // We're about to clobber scratch.
  __ SmiUntag(scratch);
  __ cvtsi2sd(xmm0, Operand(scratch));

  __ mov(scratch, right);
  __ SmiUntag(scratch);
  __ cvtsi2sd(xmm1, Operand(scratch));
}
2601
2602
// Verifies that the doubles in xmm0 and xmm1 are exactly representable as
// 32-bit signed integers by round-tripping each through cvttsd2si /
// cvtsi2sd and comparing against the original. Jumps to |non_int32| if the
// round-trip does not compare equal: not_zero catches a value difference,
// and carry catches the unordered case (ucomisd sets CF — along with ZF
// and PF — when an operand is NaN, e.g. after an out-of-range cvttsd2si).
// Clobbers |scratch| and xmm2.
void FloatingPointHelper::CheckSSE2OperandsAreInt32(MacroAssembler* masm,
                                                    Label* non_int32,
                                                    Register scratch) {
  __ cvttsd2si(scratch, Operand(xmm0));
  __ cvtsi2sd(xmm2, Operand(scratch));
  __ ucomisd(xmm0, xmm2);
  __ j(not_zero, non_int32);
  __ j(carry, non_int32);
  __ cvttsd2si(scratch, Operand(xmm1));
  __ cvtsi2sd(xmm2, Operand(scratch));
  __ ucomisd(xmm1, xmm2);
  __ j(not_zero, non_int32);
  __ j(carry, non_int32);
}
2617
2618
// Loads two operands onto the x87 FPU stack. Each operand must be a Smi or
// a HeapNumber. With ARGS_IN_REGISTERS the operands are edx (first) and
// eax (second); otherwise they are read from esp[8] and esp[4]. Because the
// first operand is pushed first, it ends up in st(1) with the second
// operand on top in st(0). Clobbers |scratch|.
void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm,
                                            Register scratch,
                                            ArgLocation arg_location) {
  Label load_smi_1, load_smi_2, done_load_1, done;
  if (arg_location == ARGS_IN_REGISTERS) {
    __ mov(scratch, edx);
  } else {
    __ mov(scratch, Operand(esp, 2 * kPointerSize));
  }
  __ JumpIfSmi(scratch, &load_smi_1, Label::kNear);
  __ fld_d(FieldOperand(scratch, HeapNumber::kValueOffset));
  __ bind(&done_load_1);

  if (arg_location == ARGS_IN_REGISTERS) {
    __ mov(scratch, eax);
  } else {
    __ mov(scratch, Operand(esp, 1 * kPointerSize));
  }
  __ JumpIfSmi(scratch, &load_smi_2, Label::kNear);
  __ fld_d(FieldOperand(scratch, HeapNumber::kValueOffset));
  __ jmp(&done, Label::kNear);

  // Smi cases: untag, spill to the stack, and load with fild_s.
  __ bind(&load_smi_1);
  __ SmiUntag(scratch);
  __ push(scratch);
  __ fild_s(Operand(esp, 0));
  __ pop(scratch);
  __ jmp(&done_load_1);

  __ bind(&load_smi_2);
  __ SmiUntag(scratch);
  __ push(scratch);
  __ fild_s(Operand(esp, 0));
  __ pop(scratch);

  __ bind(&done);
}
2656
2657
// Loads two known-Smi operands — edx (left) and eax (right) — onto the x87
// FPU stack: left ends up in st(1), right on top in st(0). A single stack
// slot is reused for both fild_s loads (push once, overwrite, pop once).
// Clobbers |scratch|; edx and eax are preserved.
void FloatingPointHelper::LoadFloatSmis(MacroAssembler* masm,
                                        Register scratch) {
  const Register left = edx;
  const Register right = eax;
  __ mov(scratch, left);
  ASSERT(!scratch.is(right));  // We're about to clobber scratch.
  __ SmiUntag(scratch);
  __ push(scratch);
  __ fild_s(Operand(esp, 0));

  __ mov(scratch, right);
  __ SmiUntag(scratch);
  // Reuse the same stack slot for the second operand.
  __ mov(Operand(esp, 0), scratch);
  __ fild_s(Operand(esp, 0));
  __ pop(scratch);
}
2674
2675
// Type-checks the binary-op operands edx and eax: falls through when both
// are numbers (Smi or HeapNumber), jumps to |non_float| otherwise. Loads
// nothing — this is a pure check. Clobbers |scratch| (map scratch).
void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
                                             Label* non_float,
                                             Register scratch) {
  Label test_other, done;
  // Test if both operands are floats or smi -> scratch=k_is_float;
  // Otherwise scratch = k_not_float.
  __ JumpIfSmi(edx, &test_other, Label::kNear);
  __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
  Factory* factory = masm->isolate()->factory();
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // argument in edx is not a number -> NaN

  __ bind(&test_other);
  __ JumpIfSmi(eax, &done, Label::kNear);
  __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // argument in eax is not a number -> NaN

  // Fall-through: Both operands are numbers.
  __ bind(&done);
}
2697
2698
Ben Murdochb0fe1622011-05-05 13:52:32 +01002699void FloatingPointHelper::CheckFloatOperandsAreInt32(MacroAssembler* masm,
2700 Label* non_int32) {
2701 return;
2702}
2703
2704
// Computes Math.pow(base, exponent) with SSE2.
// Stack on entry: esp[0] return address, esp[4] exponent, esp[8] base.
// Fast paths handled here:
//   - Smi exponent: square-and-multiply loop (with 1/result for negative
//     exponents).
//   - Heap-number exponent of exactly -0.5 or 0.5: 1/sqrt and sqrt.
// Everything else tail-calls Runtime::kMath_pow_cfunction. On success the
// result is a freshly allocated HeapNumber returned in eax.
void MathPowStub::Generate(MacroAssembler* masm) {
  // Registers are used as follows:
  // edx = base
  // eax = exponent
  // ecx = temporary, result

  CpuFeatures::Scope use_sse2(SSE2);
  Label allocate_return, call_runtime;

  // Load input parameters.
  __ mov(edx, Operand(esp, 2 * kPointerSize));
  __ mov(eax, Operand(esp, 1 * kPointerSize));

  // Save 1 in xmm3 - we need this several times later on.
  __ mov(ecx, Immediate(1));
  __ cvtsi2sd(xmm3, Operand(ecx));

  Label exponent_nonsmi;
  Label base_nonsmi;
  // If the exponent is a heap number go to that specific case.
  __ JumpIfNotSmi(eax, &exponent_nonsmi);
  __ JumpIfNotSmi(edx, &base_nonsmi);

  // Optimized version when both exponent and base are smis.
  Label powi;
  __ SmiUntag(edx);
  __ cvtsi2sd(xmm0, Operand(edx));
  __ jmp(&powi);
  // exponent is smi and base is a heapnumber.
  __ bind(&base_nonsmi);
  Factory* factory = masm->isolate()->factory();
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
         factory->heap_number_map());
  __ j(not_equal, &call_runtime);

  __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));

  // Optimized version of pow if exponent is a smi.
  // xmm0 contains the base.
  __ bind(&powi);
  __ SmiUntag(eax);

  // Save exponent in base as we need to check if exponent is negative later.
  // We know that base and exponent are in different registers.
  __ mov(edx, eax);

  // Get absolute value of exponent.
  Label no_neg;
  __ cmp(eax, 0);
  __ j(greater_equal, &no_neg, Label::kNear);
  __ neg(eax);
  __ bind(&no_neg);

  // Load xmm1 with 1.
  __ movsd(xmm1, xmm3);
  Label while_true;
  Label no_multiply;

  // Square-and-multiply: consume exponent bits lowest-first; multiply the
  // accumulator (xmm1) by the current base power when the shifted-out bit
  // is set. mulsd does not affect EFLAGS, so the j(not_zero) below still
  // tests the result of the shr.
  __ bind(&while_true);
  __ shr(eax, 1);
  __ j(not_carry, &no_multiply, Label::kNear);
  __ mulsd(xmm1, xmm0);
  __ bind(&no_multiply);
  __ mulsd(xmm0, xmm0);
  __ j(not_zero, &while_true);

  // base has the original value of the exponent - if the exponent is
  // negative return 1/result.
  __ test(edx, Operand(edx));
  __ j(positive, &allocate_return);
  // Special case if xmm1 has reached infinity.
  // NOTE(review): 0x7FB00000 is a single-precision NaN bit pattern, not
  // +Infinity (which is 0x7F800000). cvtss2sd of a NaN yields a NaN, and
  // ucomisd against a NaN compares unordered, which sets ZF — so the
  // j(equal) below appears to always branch to the runtime for negative
  // exponents. TODO: confirm whether Infinity was intended here.
  __ mov(ecx, Immediate(0x7FB00000));
  __ movd(xmm0, Operand(ecx));
  __ cvtss2sd(xmm0, xmm0);
  __ ucomisd(xmm0, xmm1);
  __ j(equal, &call_runtime);
  __ divsd(xmm3, xmm1);
  __ movsd(xmm1, xmm3);
  __ jmp(&allocate_return);

  // exponent (or both) is a heapnumber - no matter what we should now work
  // on doubles.
  __ bind(&exponent_nonsmi);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         factory->heap_number_map());
  __ j(not_equal, &call_runtime);
  __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
  // Test if exponent is nan.
  __ ucomisd(xmm1, xmm1);
  __ j(parity_even, &call_runtime);

  Label base_not_smi;
  Label handle_special_cases;
  __ JumpIfNotSmi(edx, &base_not_smi, Label::kNear);
  __ SmiUntag(edx);
  __ cvtsi2sd(xmm0, Operand(edx));
  __ jmp(&handle_special_cases, Label::kNear);

  __ bind(&base_not_smi);
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
         factory->heap_number_map());
  __ j(not_equal, &call_runtime);
  // Inspect the exponent field of the heap number to reject non-finite
  // bases before loading the value.
  __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
  __ and_(ecx, HeapNumber::kExponentMask);
  __ cmp(Operand(ecx), Immediate(HeapNumber::kExponentMask));
  // base is NaN or +/-Infinity
  __ j(greater_equal, &call_runtime);
  __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));

  // base is in xmm0 and exponent is in xmm1.
  __ bind(&handle_special_cases);
  Label not_minus_half;
  // Test for -0.5.
  // Load xmm2 with -0.5.
  __ mov(ecx, Immediate(0xBF000000));
  __ movd(xmm2, Operand(ecx));
  __ cvtss2sd(xmm2, xmm2);
  // xmm2 now has -0.5.
  __ ucomisd(xmm2, xmm1);
  __ j(not_equal, &not_minus_half, Label::kNear);

  // Calculates reciprocal of square root.
  // sqrtsd returns -0 when input is -0.  ECMA spec requires +0.
  // Adding +0 first (xorps then addsd) normalizes -0 to +0.
  __ xorps(xmm1, xmm1);
  __ addsd(xmm1, xmm0);
  __ sqrtsd(xmm1, xmm1);
  __ divsd(xmm3, xmm1);
  __ movsd(xmm1, xmm3);
  __ jmp(&allocate_return);

  // Test for 0.5.
  __ bind(&not_minus_half);
  // Load xmm2 with 0.5.
  // Since xmm3 is 1 and xmm2 is -0.5 this is simply xmm2 + xmm3.
  __ addsd(xmm2, xmm3);
  // xmm2 now has 0.5.
  __ ucomisd(xmm2, xmm1);
  __ j(not_equal, &call_runtime);
  // Calculates square root.
  // sqrtsd returns -0 when input is -0.  ECMA spec requires +0.
  __ xorps(xmm1, xmm1);
  __ addsd(xmm1, xmm0);
  __ sqrtsd(xmm1, xmm1);

  // Box the double in xmm1 into a new HeapNumber and return it in eax,
  // popping the two arguments.
  __ bind(&allocate_return);
  __ AllocateHeapNumber(ecx, eax, edx, &call_runtime);
  __ movdbl(FieldOperand(ecx, HeapNumber::kValueOffset), xmm1);
  __ mov(eax, ecx);
  __ ret(2 * kPointerSize);

  __ bind(&call_runtime);
  __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
}
2858
2859
// Reads one element of the current function's actual arguments directly
// from the stack frame: key (Smi index) in edx, formal parameter count
// (Smi) in eax. Handles both a normal frame and an arguments-adaptor
// frame. Returns the argument in eax, or falls back to
// Runtime::kGetArgumentsProperty for non-Smi or out-of-bounds keys.
void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  // The key is in edx and the parameter count is in eax.

  // The displacement is used for skipping the frame pointer on the
  // stack. It is the offset of the last parameter (if any) relative
  // to the frame pointer.
  static const int kDisplacement = 1 * kPointerSize;

  // Check that the key is a smi.
  Label slow;
  __ JumpIfNotSmi(edx, &slow, Label::kNear);

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor;
  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(ebx, StandardFrameConstants::kContextOffset));
  __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor, Label::kNear);

  // Check index against formal parameters count limit passed in
  // through register eax. Use unsigned comparison to get negative
  // check for free.
  __ cmp(edx, Operand(eax));
  __ j(above_equal, &slow, Label::kNear);

  // Read the argument from the stack and return it.
  // Both key and count are Smis (value << 1), so times_2 scaling turns a
  // Smi into a byte offset (value * kPointerSize).
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);  // Shifting code depends on these.
  __ lea(ebx, Operand(ebp, eax, times_2, 0));
  __ neg(edx);
  __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
  __ ret(0);

  // Arguments adaptor case: Check index against actual arguments
  // limit found in the arguments adaptor frame. Use unsigned
  // comparison to get negative check for free.
  __ bind(&adaptor);
  __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ cmp(edx, Operand(ecx));
  __ j(above_equal, &slow, Label::kNear);

  // Read the argument from the stack and return it.
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);  // Shifting code depends on these.
  __ lea(ebx, Operand(ebx, ecx, times_2, 0));
  __ neg(edx);
  __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
  __ ret(0);

  // Slow-case: Handle non-smi or out-of-bounds access to arguments
  // by calling the runtime system.
  __ bind(&slow);
  __ pop(ebx);  // Return address.
  __ push(edx);
  __ push(ebx);
  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
}
2917
2918
// Slow path for allocating a non-strict arguments object: patches the
// argument count and parameter pointer on the stack if the caller is an
// arguments-adaptor frame, then tail-calls Runtime::kNewArgumentsFast.
void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
  // esp[0] : return address
  // esp[4] : number of parameters
  // esp[8] : receiver displacement
  // esp[12] : function

  // Check if the calling frame is an arguments adaptor frame.
  Label runtime;
  __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
  __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &runtime, Label::kNear);

  // Patch the arguments.length and the parameters pointer with the values
  // from the adaptor frame (actual, not formal, arguments).
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ mov(Operand(esp, 1 * kPointerSize), ecx);
  __ lea(edx, Operand(edx, ecx, times_2,
                      StandardFrameConstants::kCallerSPOffset));
  __ mov(Operand(esp, 2 * kPointerSize), edx);

  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
}
2942
2943
2944void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
2945 // esp[0] : return address
2946 // esp[4] : number of parameters (tagged)
2947 // esp[8] : receiver displacement
2948 // esp[12] : function
2949
2950 // ebx = parameter count (tagged)
2951 __ mov(ebx, Operand(esp, 1 * kPointerSize));
2952
2953 // Check if the calling frame is an arguments adaptor frame.
2954 // TODO(rossberg): Factor out some of the bits that are shared with the other
2955 // Generate* functions.
2956 Label runtime;
2957 Label adaptor_frame, try_allocate;
2958 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2959 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
2960 __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2961 __ j(equal, &adaptor_frame, Label::kNear);
2962
2963 // No adaptor, parameter count = argument count.
2964 __ mov(ecx, ebx);
2965 __ jmp(&try_allocate, Label::kNear);
2966
2967 // We have an adaptor frame. Patch the parameters pointer.
2968 __ bind(&adaptor_frame);
2969 __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2970 __ lea(edx, Operand(edx, ecx, times_2,
2971 StandardFrameConstants::kCallerSPOffset));
2972 __ mov(Operand(esp, 2 * kPointerSize), edx);
2973
2974 // ebx = parameter count (tagged)
2975 // ecx = argument count (tagged)
2976 // esp[4] = parameter count (tagged)
2977 // esp[8] = address of receiver argument
2978 // Compute the mapped parameter count = min(ebx, ecx) in ebx.
2979 __ cmp(ebx, Operand(ecx));
2980 __ j(less_equal, &try_allocate, Label::kNear);
2981 __ mov(ebx, ecx);
2982
2983 __ bind(&try_allocate);
2984
2985 // Save mapped parameter count.
2986 __ push(ebx);
2987
2988 // Compute the sizes of backing store, parameter map, and arguments object.
2989 // 1. Parameter map, has 2 extra words containing context and backing store.
2990 const int kParameterMapHeaderSize =
2991 FixedArray::kHeaderSize + 2 * kPointerSize;
2992 Label no_parameter_map;
2993 __ test(ebx, Operand(ebx));
2994 __ j(zero, &no_parameter_map, Label::kNear);
2995 __ lea(ebx, Operand(ebx, times_2, kParameterMapHeaderSize));
2996 __ bind(&no_parameter_map);
2997
2998 // 2. Backing store.
2999 __ lea(ebx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize));
3000
3001 // 3. Arguments object.
3002 __ add(Operand(ebx), Immediate(Heap::kArgumentsObjectSize));
3003
3004 // Do the allocation of all three objects in one go.
3005 __ AllocateInNewSpace(ebx, eax, edx, edi, &runtime, TAG_OBJECT);
3006
3007 // eax = address of new object(s) (tagged)
3008 // ecx = argument count (tagged)
3009 // esp[0] = mapped parameter count (tagged)
3010 // esp[8] = parameter count (tagged)
3011 // esp[12] = address of receiver argument
3012 // Get the arguments boilerplate from the current (global) context into edi.
3013 Label has_mapped_parameters, copy;
3014 __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
3015 __ mov(edi, FieldOperand(edi, GlobalObject::kGlobalContextOffset));
3016 __ mov(ebx, Operand(esp, 0 * kPointerSize));
3017 __ test(ebx, Operand(ebx));
3018 __ j(not_zero, &has_mapped_parameters, Label::kNear);
3019 __ mov(edi, Operand(edi,
3020 Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX)));
3021 __ jmp(&copy, Label::kNear);
3022
3023 __ bind(&has_mapped_parameters);
3024 __ mov(edi, Operand(edi,
3025 Context::SlotOffset(Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX)));
3026 __ bind(&copy);
3027
3028 // eax = address of new object (tagged)
3029 // ebx = mapped parameter count (tagged)
3030 // ecx = argument count (tagged)
3031 // edi = address of boilerplate object (tagged)
3032 // esp[0] = mapped parameter count (tagged)
3033 // esp[8] = parameter count (tagged)
3034 // esp[12] = address of receiver argument
3035 // Copy the JS object part.
3036 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
3037 __ mov(edx, FieldOperand(edi, i));
3038 __ mov(FieldOperand(eax, i), edx);
3039 }
3040
3041 // Setup the callee in-object property.
3042 STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
3043 __ mov(edx, Operand(esp, 4 * kPointerSize));
3044 __ mov(FieldOperand(eax, JSObject::kHeaderSize +
3045 Heap::kArgumentsCalleeIndex * kPointerSize),
3046 edx);
3047
3048 // Use the length (smi tagged) and set that as an in-object property too.
3049 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
3050 __ mov(FieldOperand(eax, JSObject::kHeaderSize +
3051 Heap::kArgumentsLengthIndex * kPointerSize),
3052 ecx);
3053
3054 // Setup the elements pointer in the allocated arguments object.
3055 // If we allocated a parameter map, edi will point there, otherwise to the
3056 // backing store.
3057 __ lea(edi, Operand(eax, Heap::kArgumentsObjectSize));
3058 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
3059
3060 // eax = address of new object (tagged)
3061 // ebx = mapped parameter count (tagged)
3062 // ecx = argument count (tagged)
3063 // edi = address of parameter map or backing store (tagged)
3064 // esp[0] = mapped parameter count (tagged)
3065 // esp[8] = parameter count (tagged)
3066 // esp[12] = address of receiver argument
3067 // Free a register.
3068 __ push(eax);
3069
3070 // Initialize parameter map. If there are no mapped arguments, we're done.
3071 Label skip_parameter_map;
3072 __ test(ebx, Operand(ebx));
3073 __ j(zero, &skip_parameter_map);
3074
3075 __ mov(FieldOperand(edi, FixedArray::kMapOffset),
3076 Immediate(FACTORY->non_strict_arguments_elements_map()));
3077 __ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2))));
3078 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax);
3079 __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi);
3080 __ lea(eax, Operand(edi, ebx, times_2, kParameterMapHeaderSize));
3081 __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 1 * kPointerSize), eax);
3082
3083 // Copy the parameter slots and the holes in the arguments.
3084 // We need to fill in mapped_parameter_count slots. They index the context,
3085 // where parameters are stored in reverse order, at
3086 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
3087 // The mapped parameter thus need to get indices
3088 // MIN_CONTEXT_SLOTS+parameter_count-1 ..
3089 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
3090 // We loop from right to left.
3091 Label parameters_loop, parameters_test;
3092 __ push(ecx);
3093 __ mov(eax, Operand(esp, 2 * kPointerSize));
3094 __ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
3095 __ add(ebx, Operand(esp, 4 * kPointerSize));
3096 __ sub(ebx, Operand(eax));
3097 __ mov(ecx, FACTORY->the_hole_value());
3098 __ mov(edx, edi);
3099 __ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize));
3100 // eax = loop variable (tagged)
3101 // ebx = mapping index (tagged)
3102 // ecx = the hole value
3103 // edx = address of parameter map (tagged)
3104 // edi = address of backing store (tagged)
3105 // esp[0] = argument count (tagged)
3106 // esp[4] = address of new object (tagged)
3107 // esp[8] = mapped parameter count (tagged)
3108 // esp[16] = parameter count (tagged)
3109 // esp[20] = address of receiver argument
3110 __ jmp(&parameters_test, Label::kNear);
3111
3112 __ bind(&parameters_loop);
3113 __ sub(Operand(eax), Immediate(Smi::FromInt(1)));
3114 __ mov(FieldOperand(edx, eax, times_2, kParameterMapHeaderSize), ebx);
3115 __ mov(FieldOperand(edi, eax, times_2, FixedArray::kHeaderSize), ecx);
3116 __ add(Operand(ebx), Immediate(Smi::FromInt(1)));
3117 __ bind(&parameters_test);
3118 __ test(eax, Operand(eax));
3119 __ j(not_zero, &parameters_loop, Label::kNear);
3120 __ pop(ecx);
3121
3122 __ bind(&skip_parameter_map);
3123
3124 // ecx = argument count (tagged)
3125 // edi = address of backing store (tagged)
3126 // esp[0] = address of new object (tagged)
3127 // esp[4] = mapped parameter count (tagged)
3128 // esp[12] = parameter count (tagged)
3129 // esp[16] = address of receiver argument
3130 // Copy arguments header and remaining slots (if there are any).
3131 __ mov(FieldOperand(edi, FixedArray::kMapOffset),
3132 Immediate(FACTORY->fixed_array_map()));
3133 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
3134
3135 Label arguments_loop, arguments_test;
3136 __ mov(ebx, Operand(esp, 1 * kPointerSize));
3137 __ mov(edx, Operand(esp, 4 * kPointerSize));
3138 __ sub(Operand(edx), ebx); // Is there a smarter way to do negative scaling?
3139 __ sub(Operand(edx), ebx);
3140 __ jmp(&arguments_test, Label::kNear);
3141
3142 __ bind(&arguments_loop);
3143 __ sub(Operand(edx), Immediate(kPointerSize));
3144 __ mov(eax, Operand(edx, 0));
3145 __ mov(FieldOperand(edi, ebx, times_2, FixedArray::kHeaderSize), eax);
3146 __ add(Operand(ebx), Immediate(Smi::FromInt(1)));
3147
3148 __ bind(&arguments_test);
3149 __ cmp(ebx, Operand(ecx));
3150 __ j(less, &arguments_loop, Label::kNear);
3151
3152 // Restore.
3153 __ pop(eax); // Address of arguments object.
3154 __ pop(ebx); // Parameter count.
3155
3156 // Return and remove the on-stack parameters.
3157 __ ret(3 * kPointerSize);
3158
3159 // Do the runtime call to allocate the arguments object.
3160 __ bind(&runtime);
3161 __ pop(eax); // Remove saved parameter count.
3162 __ mov(Operand(esp, 1 * kPointerSize), ecx); // Patch argument count.
3163 __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
3164}
3165
3166
// Builds a strict-mode arguments object on the fast path: allocates the
// JSObject plus its FixedArray backing store in new space, copies the actual
// arguments in, and returns it in eax. Falls back to the
// Runtime::kNewStrictArgumentsFast call if allocation fails. Strict-mode
// arguments have no mapped parameters, so no parameter map is built.
void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
  // esp[0] : return address
  // esp[4] : number of parameters
  // esp[8] : receiver displacement
  // esp[12] : function

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
  __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor_frame, Label::kNear);

  // No adaptor frame: the declared parameter count on the stack is the
  // actual argument count.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ jmp(&try_allocate, Label::kNear);

  // Adaptor frame present: patch the arguments.length and the parameters
  // pointer from the adaptor frame (ecx is the length as a smi; times_2
  // scales the smi-tagged count to byte offsets).
  __ bind(&adaptor_frame);
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ mov(Operand(esp, 1 * kPointerSize), ecx);
  __ lea(edx, Operand(edx, ecx, times_2,
                      StandardFrameConstants::kCallerSPOffset));
  __ mov(Operand(esp, 2 * kPointerSize), edx);

  // Try the new space allocation. Start out with computing the size of
  // the arguments object and the elements array.
  Label add_arguments_object;
  __ bind(&try_allocate);
  __ test(ecx, Operand(ecx));
  __ j(zero, &add_arguments_object, Label::kNear);
  // ecx is a smi, so times_2 gives the FixedArray payload size in bytes.
  __ lea(ecx, Operand(ecx, times_2, FixedArray::kHeaderSize));
  __ bind(&add_arguments_object);
  __ add(Operand(ecx), Immediate(Heap::kArgumentsObjectSizeStrict));

  // Do the allocation of both objects in one go. eax = result, edx/ebx are
  // scratch; jump to runtime on allocation failure.
  __ AllocateInNewSpace(ecx, eax, edx, ebx, &runtime, TAG_OBJECT);

  // Get the strict-mode arguments boilerplate from the current (global)
  // context.
  __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ mov(edi, FieldOperand(edi, GlobalObject::kGlobalContextOffset));
  const int offset =
      Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX);
  __ mov(edi, Operand(edi, offset));

  // Copy the JS object part of the boilerplate, word by word.
  for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
    __ mov(ebx, FieldOperand(edi, i));
    __ mov(FieldOperand(eax, i), ebx);
  }

  // Get the length (smi tagged) and set that as an in-object property too.
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                      Heap::kArgumentsLengthIndex * kPointerSize),
         ecx);

  // If there are no actual arguments, we're done: the boilerplate's empty
  // elements array stays in place.
  Label done;
  __ test(ecx, Operand(ecx));
  __ j(zero, &done, Label::kNear);

  // Get the parameters pointer from the stack.
  __ mov(edx, Operand(esp, 2 * kPointerSize));

  // Setup the elements pointer in the allocated arguments object and
  // initialize the header in the elements fixed array (which sits directly
  // after the JSObject in the allocation).
  __ lea(edi, Operand(eax, Heap::kArgumentsObjectSizeStrict));
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(FACTORY->fixed_array_map()));

  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
  // Untag the length for the loop below.
  __ SmiUntag(ecx);

  // Copy the fixed array slots. edx walks the caller's parameters downward
  // (they are stored in reverse order on the stack), edi walks the
  // FixedArray forward.
  Label loop;
  __ bind(&loop);
  __ mov(ebx, Operand(edx, -1 * kPointerSize));  // Skip receiver.
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx);
  __ add(Operand(edi), Immediate(kPointerSize));
  __ sub(Operand(edx), Immediate(kPointerSize));
  __ dec(ecx);
  __ j(not_zero, &loop);

  // Return and remove the three on-stack parameters.
  __ bind(&done);
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
}
3262
3263
// Executes a compiled Irregexp program against a subject string, filling in
// last_match_info on success. Handles flat sequential ASCII/two-byte strings
// as well as flat cons strings and sliced strings; any other case (external
// strings, non-flat cons, retries, allocation failures) is punted to
// Runtime::kRegExpExec.
void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Just jump directly to runtime if native RegExp is not selected at compile
  // time or if regexp entry in generated code is turned off runtime switch or
  // at compilation.
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
#else  // V8_INTERPRETED_REGEXP
  if (!FLAG_regexp_entry_native) {
    __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
    return;
  }

  // Stack frame on entry.
  //  esp[0]: return address
  //  esp[4]: last_match_info (expected JSArray)
  //  esp[8]: previous index
  //  esp[12]: subject string
  //  esp[16]: JSRegExp object

  static const int kLastMatchInfoOffset = 1 * kPointerSize;
  static const int kPreviousIndexOffset = 2 * kPointerSize;
  static const int kSubjectOffset = 3 * kPointerSize;
  static const int kJSRegExpOffset = 4 * kPointerSize;

  Label runtime, invoke_regexp;

  // Ensure that a RegExp stack is allocated. A zero size means the backtrack
  // stack has not been set up yet, so go to the runtime to allocate it.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(
          masm->isolate());
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(masm->isolate());
  __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ test(ebx, Operand(ebx));
  __ j(zero, &runtime);

  // Check that the first argument is a JSRegExp object.
  __ mov(eax, Operand(esp, kJSRegExpOffset));
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfSmi(eax, &runtime);
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
  __ j(not_equal, &runtime);
  // Check that the RegExp has been compiled (data contains a fixed array).
  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    __ test(ecx, Immediate(kSmiTagMask));
    __ Check(not_zero, "Unexpected type for RegExp data, FixedArray expected");
    __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
    __ Check(equal, "Unexpected type for RegExp data, FixedArray expected");
  }

  // ecx: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
  __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset));
  __ cmp(Operand(ebx), Immediate(Smi::FromInt(JSRegExp::IRREGEXP)));
  __ j(not_equal, &runtime);

  // ecx: RegExp data (FixedArray)
  // Check that the number of captures fit in the static offsets vector buffer.
  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2. This
  // uses the assumption that smis are 2 * their untagged value, so the
  // smi-tagged count is already the doubled value; adding 2 accounts for the
  // implicit whole-match capture.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  __ add(Operand(edx), Immediate(2));  // edx was a smi.
  // Check that the static offsets vector buffer is large enough.
  __ cmp(edx, OffsetsVector::kStaticOffsetsVectorSize);
  __ j(above, &runtime);

  // ecx: RegExp data (FixedArray)
  // edx: Number of capture registers
  // Check that the second argument is a string.
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ JumpIfSmi(eax, &runtime);
  Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
  __ j(NegateCondition(is_string), &runtime);
  // Get the length of the string to ebx.
  __ mov(ebx, FieldOperand(eax, String::kLengthOffset));

  // ebx: Length of subject string as a smi
  // ecx: RegExp data (FixedArray)
  // edx: Number of capture registers
  // Check that the third argument is a positive smi less than the subject
  // string length. A negative value will be greater (unsigned comparison).
  __ mov(eax, Operand(esp, kPreviousIndexOffset));
  __ JumpIfNotSmi(eax, &runtime);
  __ cmp(eax, Operand(ebx));
  __ j(above_equal, &runtime);

  // ecx: RegExp data (FixedArray)
  // edx: Number of capture registers
  // Check that the fourth object is a JSArray object.
  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
  __ JumpIfSmi(eax, &runtime);
  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
  __ j(not_equal, &runtime);
  // Check that the JSArray is in fast case (elements are a plain FixedArray).
  __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
  __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
  Factory* factory = masm->isolate()->factory();
  __ cmp(eax, factory->fixed_array_map());
  __ j(not_equal, &runtime);
  // Check that the last match info has space for the capture registers and the
  // additional information.
  __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
  __ SmiUntag(eax);
  __ add(Operand(edx), Immediate(RegExpImpl::kLastMatchOverhead));
  __ cmp(edx, Operand(eax));
  __ j(greater, &runtime);

  // Reset offset for possibly sliced string (edi stays 0 for non-sliced
  // subjects).
  __ Set(edi, Immediate(0));
  // ecx: RegExp data (FixedArray)
  // Check the representation and encoding of the subject string.
  Label seq_ascii_string, seq_two_byte_string, check_code;
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  // First check for flat two byte string.
  __ and_(ebx,
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string, Label::kNear);
  // Any other flat string must be a flat ascii string.
  __ and_(Operand(ebx),
          Immediate(kIsNotStringMask | kStringRepresentationMask));
  __ j(zero, &seq_ascii_string, Label::kNear);

  // Check for flat cons string or sliced string.
  // A flat cons string is a cons string where the second part is the empty
  // string. In that case the subject string is just the first part of the cons
  // string. Also in this case the first part of the cons string is known to be
  // a sequential string or an external string.
  // In the case of a sliced string its offset has to be taken into account.
  Label cons_string, check_encoding;
  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
  __ cmp(Operand(ebx), Immediate(kExternalStringTag));
  __ j(less, &cons_string);
  __ j(equal, &runtime);  // External strings go to the runtime.

  // String is sliced.
  __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset));
  __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset));
  // edi: offset of sliced string, smi-tagged.
  // eax: parent string.
  __ jmp(&check_encoding, Label::kNear);
  // String is a cons string, check whether it is flat.
  __ bind(&cons_string);
  __ cmp(FieldOperand(eax, ConsString::kSecondOffset), factory->empty_string());
  __ j(not_equal, &runtime);
  __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
  __ bind(&check_encoding);
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  // eax: first part of cons string or parent of sliced string.
  // ebx: map of first part of cons string or map of parent of sliced string.
  // Is first part of cons or parent of slice a flat two byte string?
  __ test_b(FieldOperand(ebx, Map::kInstanceTypeOffset),
            kStringRepresentationMask | kStringEncodingMask);
  STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string, Label::kNear);
  // Any other flat string must be ascii.
  __ test_b(FieldOperand(ebx, Map::kInstanceTypeOffset),
            kStringRepresentationMask);
  __ j(not_zero, &runtime);

  __ bind(&seq_ascii_string);
  // eax: subject string (flat ascii)
  // ecx: RegExp data (FixedArray)
  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataAsciiCodeOffset));
  __ Set(ecx, Immediate(1));  // Type is ascii.
  __ jmp(&check_code, Label::kNear);

  __ bind(&seq_two_byte_string);
  // eax: subject string (flat two byte)
  // ecx: RegExp data (FixedArray)
  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
  __ Set(ecx, Immediate(0));  // Type is two byte.

  __ bind(&check_code);
  // Check that the irregexp code has been generated for the actual string
  // encoding. If it has, the field contains a code object otherwise it contains
  // a smi (code flushing support).
  __ JumpIfSmi(edx, &runtime);

  // eax: subject string
  // edx: code
  // ecx: encoding of subject string (1 if ascii, 0 if two_byte);
  // Load used arguments before starting to push arguments for call to native
  // RegExp code to avoid handling changing stack height.
  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
  __ SmiUntag(ebx);  // Previous index from smi.

  // eax: subject string
  // ebx: previous index
  // edx: code
  // ecx: encoding of subject string (1 if ascii 0 if two_byte);
  // All checks done. Now push arguments for native regexp code.
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->regexp_entry_native(), 1);

  // Isolates: note we add an additional parameter here (isolate pointer).
  static const int kRegExpExecuteArguments = 8;
  __ EnterApiExitFrame(kRegExpExecuteArguments);

  // Argument 8: Pass current isolate address.
  __ mov(Operand(esp, 7 * kPointerSize),
      Immediate(ExternalReference::isolate_address()));

  // Argument 7: Indicate that this is a direct call from JavaScript.
  __ mov(Operand(esp, 6 * kPointerSize), Immediate(1));

  // Argument 6: Start (high end) of backtracking stack memory area.
  __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address));
  __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ mov(Operand(esp, 5 * kPointerSize), esi);

  // Argument 5: static offsets vector buffer.
  __ mov(Operand(esp, 4 * kPointerSize),
         Immediate(ExternalReference::address_of_static_offsets_vector(
             masm->isolate())));

  // Argument 2: Previous index.
  __ mov(Operand(esp, 1 * kPointerSize), ebx);

  // Argument 1: Original subject string.
  // The original subject is in the previous stack frame. Therefore we have to
  // use ebp, which points exactly to one pointer size below the previous esp.
  // (Because creating a new stack frame pushes the previous ebp onto the stack
  // and thereby moves up esp by one kPointerSize.)
  __ mov(esi, Operand(ebp, kSubjectOffset + kPointerSize));
  __ mov(Operand(esp, 0 * kPointerSize), esi);

  // esi: original subject string
  // eax: underlying subject string
  // ebx: previous index
  // ecx: encoding of subject string (1 if ascii 0 if two_byte);
  // edx: code
  // Argument 4: End of string data
  // Argument 3: Start of string data
  // Prepare start and end index of the input.
  // Load the length from the original sliced string if that is the case.
  __ mov(esi, FieldOperand(esi, String::kLengthOffset));
  __ add(esi, Operand(edi));  // Calculate input end wrt offset.
  __ SmiUntag(edi);
  __ add(ebx, Operand(edi));  // Calculate input start wrt offset.

  // ebx: start index of the input string
  // esi: end index of the input string
  Label setup_two_byte, setup_rest;
  __ test(ecx, Operand(ecx));
  __ j(zero, &setup_two_byte, Label::kNear);
  __ SmiUntag(esi);
  __ lea(ecx, FieldOperand(eax, esi, times_1, SeqAsciiString::kHeaderSize));
  __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
  __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqAsciiString::kHeaderSize));
  __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.
  __ jmp(&setup_rest, Label::kNear);

  __ bind(&setup_two_byte);
  STATIC_ASSERT(kSmiTag == 0);
  // Two-byte characters are 2 bytes wide, so the smi-tagged end index (which
  // is value * 2) can be used directly with times_1 scaling.
  STATIC_ASSERT(kSmiTagSize == 1);  // esi is smi (powered by 2).
  __ lea(ecx, FieldOperand(eax, esi, times_1, SeqTwoByteString::kHeaderSize));
  __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
  __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
  __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.

  __ bind(&setup_rest);

  // Locate the code entry and call it.
  __ add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(Operand(edx));

  // Drop arguments and come back to JS mode.
  __ LeaveApiExitFrame();

  // Check the result.
  Label success;
  __ cmp(eax, NativeRegExpMacroAssembler::SUCCESS);
  __ j(equal, &success);
  Label failure;
  __ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
  __ j(equal, &failure);
  __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
  // If not exception it can only be retry. Handle that in the runtime system.
  __ j(not_equal, &runtime);
  // Result must now be exception. If there is no pending exception already a
  // stack overflow (on the backtrack stack) was detected in RegExp code but
  // haven't created the exception yet. Handle that in the runtime system.
  // TODO(592): Rerunning the RegExp to get the stack overflow exception.
  ExternalReference pending_exception(Isolate::k_pending_exception_address,
                                      masm->isolate());
  __ mov(edx,
         Operand::StaticVariable(ExternalReference::the_hole_value_location(
             masm->isolate())));
  __ mov(eax, Operand::StaticVariable(pending_exception));
  __ cmp(edx, Operand(eax));
  __ j(equal, &runtime);  // Pending exception is the hole: no real exception.
  // For exception, throw the exception again.

  // Clear the pending exception variable (restore the hole, now in edx).
  __ mov(Operand::StaticVariable(pending_exception), edx);

  // Special handling of termination exceptions which are uncatchable
  // by javascript code.
  __ cmp(eax, factory->termination_exception());
  Label throw_termination_exception;
  __ j(equal, &throw_termination_exception, Label::kNear);

  // Handle normal exception by following handler chain.
  __ Throw(eax);

  __ bind(&throw_termination_exception);
  __ ThrowUncatchable(TERMINATION, eax);

  __ bind(&failure);
  // For failure to match, return null.
  __ mov(Operand(eax), factory->null_value());
  __ ret(4 * kPointerSize);

  // Load RegExp data.
  __ bind(&success);
  __ mov(eax, Operand(esp, kJSRegExpOffset));
  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  __ add(Operand(edx), Immediate(2));  // edx was a smi.

  // edx: Number of capture registers
  // Load last_match_info which is still known to be a fast case JSArray.
  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
  __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));

  // ebx: last_match_info backing store (FixedArray)
  // edx: number of capture registers
  // Store the capture count.
  __ SmiTag(edx);  // Number of capture registers to smi.
  __ mov(FieldOperand(ebx, RegExpImpl::kLastCaptureCountOffset), edx);
  __ SmiUntag(edx);  // Number of capture registers back from smi.
  // Store last subject and last input, with write barriers for the GC.
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax);
  __ mov(ecx, ebx);
  __ RecordWrite(ecx, RegExpImpl::kLastSubjectOffset, eax, edi);
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax);
  __ mov(ecx, ebx);
  __ RecordWrite(ecx, RegExpImpl::kLastInputOffset, eax, edi);

  // Get the static offsets vector filled by the native regexp code.
  ExternalReference address_of_static_offsets_vector =
      ExternalReference::address_of_static_offsets_vector(masm->isolate());
  __ mov(ecx, Immediate(address_of_static_offsets_vector));

  // ebx: last_match_info backing store (FixedArray)
  // ecx: offsets vector
  // edx: number of capture registers
  Label next_capture, done;
  // Capture register counter starts from number of capture registers and
  // counts down until wrapping after zero.
  __ bind(&next_capture);
  __ sub(Operand(edx), Immediate(1));
  __ j(negative, &done, Label::kNear);
  // Read the value from the static offsets vector buffer.
  __ mov(edi, Operand(ecx, edx, times_int_size, 0));
  __ SmiTag(edi);
  // Store the smi value in the last match info.
  __ mov(FieldOperand(ebx,
                      edx,
                      times_pointer_size,
                      RegExpImpl::kFirstCaptureOffset),
         edi);
  __ jmp(&next_capture);
  __ bind(&done);

  // Return last match info.
  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
  __ ret(4 * kPointerSize);

  // Do the runtime call to execute the regexp.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
#endif  // V8_INTERPRETED_REGEXP
}
3650
3651
// Fast-path allocation of a JSRegExpResult array of a given (smi) length,
// with its index and input fields set from the stack arguments and its
// elements pre-filled with the hole. Lengths above kMaxInlineLength, non-smi
// lengths, and allocation failures fall back to
// Runtime::kRegExpConstructResult.
//
// Stack on entry:
//  esp[4]: input string (last argument)
//  esp[8]: match index
//  esp[12]: length (expected smi)
void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
  const int kMaxInlineLength = 100;
  Label slowcase;
  Label done;
  __ mov(ebx, Operand(esp, kPointerSize * 3));
  __ JumpIfNotSmi(ebx, &slowcase);
  __ cmp(Operand(ebx), Immediate(Smi::FromInt(kMaxInlineLength)));
  __ j(above, &slowcase);
  // Smi-tagging is equivalent to multiplying by 2.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  // Allocate RegExpResult followed by FixedArray with size in ebx.
  // JSArray:   [Map][empty properties][Elements][Length-smi][index][input]
  // Elements:  [Map][Length][..elements..]
  // times_half_pointer_size compensates for ebx being a smi (value * 2).
  __ AllocateInNewSpace(JSRegExpResult::kSize + FixedArray::kHeaderSize,
                        times_half_pointer_size,
                        ebx,  // In: Number of elements (times 2, being a smi)
                        eax,  // Out: Start of allocation (tagged).
                        ecx,  // Out: End of allocation.
                        edx,  // Scratch register
                        &slowcase,
                        TAG_OBJECT);
  // eax: Start of allocated area, object-tagged.

  // Set JSArray map to global.regexp_result_map().
  // Set empty properties FixedArray.
  // Set elements to point to FixedArray allocated right after the JSArray.
  // Interleave operations for better latency.
  __ mov(edx, ContextOperand(esi, Context::GLOBAL_INDEX));
  Factory* factory = masm->isolate()->factory();
  __ mov(ecx, Immediate(factory->empty_fixed_array()));
  __ lea(ebx, Operand(eax, JSRegExpResult::kSize));
  __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalContextOffset));
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ecx);
  __ mov(edx, ContextOperand(edx, Context::REGEXP_RESULT_MAP_INDEX));
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), edx);

  // Set input, index and length fields from arguments.
  __ mov(ecx, Operand(esp, kPointerSize * 1));
  __ mov(FieldOperand(eax, JSRegExpResult::kInputOffset), ecx);
  __ mov(ecx, Operand(esp, kPointerSize * 2));
  __ mov(FieldOperand(eax, JSRegExpResult::kIndexOffset), ecx);
  __ mov(ecx, Operand(esp, kPointerSize * 3));
  __ mov(FieldOperand(eax, JSArray::kLengthOffset), ecx);

  // Fill out the elements FixedArray.
  // eax: JSArray.
  // ebx: FixedArray.
  // ecx: Number of elements in array, as smi.

  // Set map.
  __ mov(FieldOperand(ebx, HeapObject::kMapOffset),
         Immediate(factory->fixed_array_map()));
  // Set length.
  __ mov(FieldOperand(ebx, FixedArray::kLengthOffset), ecx);
  // Fill contents of fixed-array with the-hole.
  __ SmiUntag(ecx);
  __ mov(edx, Immediate(factory->the_hole_value()));
  __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize));
  // Fill fixed array elements with hole.
  // eax: JSArray.
  // ecx: Number of elements to fill.
  // ebx: Start of elements in FixedArray.
  // edx: the hole.
  // NOTE: the loop's conditional jump deliberately consumes the flags set by
  // the test below on entry and by the sub on subsequent iterations, so no
  // instruction may be inserted between them.
  Label loop;
  __ test(ecx, Operand(ecx));
  __ bind(&loop);
  __ j(less_equal, &done, Label::kNear);  // Jump if ecx is negative or zero.
  __ sub(Operand(ecx), Immediate(1));
  __ mov(Operand(ebx, ecx, times_pointer_size, 0), edx);
  __ jmp(&loop);

  __ bind(&done);
  __ ret(3 * kPointerSize);

  __ bind(&slowcase);
  __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
}
3731
3732
// Looks up |object| (a smi or heap number) in the number-string cache and,
// on a hit, loads the cached string into |result|.  On a miss (or when
// |object| is a non-number heap object) control transfers to |not_found|
// with |object| unchanged.
//
// masm           - assembler emitting the stub code.
// object         - register holding the number to look up (preserved).
// result         - receives the cached string; also used as a temporary
//                  (holds the cache array during the lookup).
// scratch1       - clobbered (holds the hash mask / probe).
// scratch2       - clobbered (holds the untagged hash / cache index).
// object_is_smi  - compile-time guarantee that |object| is a smi, which
//                  lets us skip the smi/heap-number dispatch entirely.
// not_found      - jump target for cache misses.
void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
                                                         Register object,
                                                         Register result,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         bool object_is_smi,
                                                         Label* not_found) {
  // Use of registers. Register result is used as a temporary.
  Register number_string_cache = result;
  Register mask = scratch1;
  Register scratch = scratch2;

  // Load the number string cache from the roots array.
  ExternalReference roots_address =
      ExternalReference::roots_address(masm->isolate());
  __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex));
  __ mov(number_string_cache,
         Operand::StaticArray(scratch, times_pointer_size, roots_address));
  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry, so
  // the number of entries is length/2, and the mask is entries-1
  // (the cache size is a power of two).
  __ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
  __ shr(mask, kSmiTagSize + 1);  // Untag length and divide it by two.
  __ sub(Operand(mask), Immediate(1));  // Make mask.

  // Calculate the entry in the number string cache. The hash value in the
  // number string cache for smis is just the smi value, and the hash for
  // doubles is the xor of the upper and lower words. See
  // Heap::GetNumberStringCache.
  Label smi_hash_calculated;
  Label load_result_from_cache;
  if (object_is_smi) {
    // Caller guarantees a smi: hash is simply the untagged value.
    __ mov(scratch, object);
    __ SmiUntag(scratch);
  } else {
    Label not_smi;
    STATIC_ASSERT(kSmiTag == 0);
    __ JumpIfNotSmi(object, &not_smi, Label::kNear);
    __ mov(scratch, object);
    __ SmiUntag(scratch);
    __ jmp(&smi_hash_calculated, Label::kNear);
    __ bind(&not_smi);
    // Anything that is neither a smi nor a heap number is a cache miss.
    __ cmp(FieldOperand(object, HeapObject::kMapOffset),
           masm->isolate()->factory()->heap_number_map());
    __ j(not_equal, not_found);
    STATIC_ASSERT(8 == kDoubleSize);
    // Hash the double by xoring its two 32-bit halves.
    __ mov(scratch, FieldOperand(object, HeapNumber::kValueOffset));
    __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
    // Object is heap number and hash is now in scratch. Calculate cache index.
    __ and_(scratch, Operand(mask));
    Register index = scratch;
    Register probe = mask;
    // Load the cached number at the candidate entry; a smi there can never
    // match a heap-number key, so bail out.
    __ mov(probe,
           FieldOperand(number_string_cache,
                        index,
                        times_twice_pointer_size,
                        FixedArray::kHeaderSize));
    __ JumpIfSmi(probe, not_found);
    // Compare the doubles bit-for-bit via a floating-point compare so that
    // e.g. +0 and -0 hash to different handling; prefer SSE2 when available.
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatures::Scope fscope(SSE2);
      __ movdbl(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
      __ movdbl(xmm1, FieldOperand(probe, HeapNumber::kValueOffset));
      __ ucomisd(xmm0, xmm1);
    } else {
      // x87 fallback when SSE2 is unavailable.
      __ fld_d(FieldOperand(object, HeapNumber::kValueOffset));
      __ fld_d(FieldOperand(probe, HeapNumber::kValueOffset));
      __ FCmp();
    }
    __ j(parity_even, not_found);  // Bail out if NaN is involved.
    __ j(not_equal, not_found);  // The cache did not contain this value.
    __ jmp(&load_result_from_cache, Label::kNear);
  }

  __ bind(&smi_hash_calculated);
  // Object is smi and hash is now in scratch. Calculate cache index.
  __ and_(scratch, Operand(mask));
  Register index = scratch;
  // Check if the entry is the smi we are looking for.
  __ cmp(object,
         FieldOperand(number_string_cache,
                      index,
                      times_twice_pointer_size,
                      FixedArray::kHeaderSize));
  __ j(not_equal, not_found);

  // Get the result from the cache: the string sits one pointer after the
  // number in each (number, string) pair.
  __ bind(&load_result_from_cache);
  __ mov(result,
         FieldOperand(number_string_cache,
                      index,
                      times_twice_pointer_size,
                      FixedArray::kHeaderSize + kPointerSize));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->number_to_string_native(), 1);
}
3827
3828
// Stub entry: converts the number argument on the stack to a string.
// Tries the number-string cache first; on a miss, tail-calls the
// runtime (skipping the cache, since the lookup already failed).
// On return, eax holds the resulting string and the single argument
// has been popped.
void NumberToStringStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Load the argument (the number) from the stack; +1 skips the return
  // address.
  __ mov(ebx, Operand(esp, kPointerSize));

  // Generate code to lookup number in the number string cache.
  GenerateLookupNumberStringCache(masm, ebx, eax, ecx, edx, false, &runtime);
  __ ret(1 * kPointerSize);

  __ bind(&runtime);
  // Handle number to string in the runtime system if not found in the cache.
  __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);
}
3842
3843
3844static int NegativeComparisonResult(Condition cc) {
3845 ASSERT(cc != equal);
3846 ASSERT((cc == less) || (cc == less_equal)
3847 || (cc == greater) || (cc == greater_equal));
3848 return (cc == greater || cc == greater_equal) ? LESS : GREATER;
3849}
3850
3851void CompareStub::Generate(MacroAssembler* masm) {
3852 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
3853
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003854 Label check_unequal_objects;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003855
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003856 // Compare two smis if required.
3857 if (include_smi_compare_) {
3858 Label non_smi, smi_done;
3859 __ mov(ecx, Operand(edx));
3860 __ or_(ecx, Operand(eax));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003861 __ JumpIfNotSmi(ecx, &non_smi, Label::kNear);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003862 __ sub(edx, Operand(eax)); // Return on the result of the subtraction.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003863 __ j(no_overflow, &smi_done, Label::kNear);
Ben Murdochf87a2032010-10-22 12:50:53 +01003864 __ not_(edx); // Correct sign in case of overflow. edx is never 0 here.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003865 __ bind(&smi_done);
3866 __ mov(eax, edx);
3867 __ ret(0);
3868 __ bind(&non_smi);
3869 } else if (FLAG_debug_code) {
3870 __ mov(ecx, Operand(edx));
3871 __ or_(ecx, Operand(eax));
3872 __ test(ecx, Immediate(kSmiTagMask));
3873 __ Assert(not_zero, "Unexpected smi operands.");
3874 }
3875
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003876 // NOTICE! This code is only reached after a smi-fast-case check, so
3877 // it is certain that at least one operand isn't a smi.
3878
3879 // Identical objects can be compared fast, but there are some tricky cases
3880 // for NaN and undefined.
3881 {
3882 Label not_identical;
3883 __ cmp(eax, Operand(edx));
3884 __ j(not_equal, &not_identical);
3885
3886 if (cc_ != equal) {
3887 // Check for undefined. undefined OP undefined is false even though
3888 // undefined == undefined.
Ben Murdoch257744e2011-11-30 15:57:28 +00003889 Label check_for_nan;
Steve Block44f0eee2011-05-26 01:26:41 +01003890 __ cmp(edx, masm->isolate()->factory()->undefined_value());
Ben Murdoch257744e2011-11-30 15:57:28 +00003891 __ j(not_equal, &check_for_nan, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003892 __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
3893 __ ret(0);
3894 __ bind(&check_for_nan);
3895 }
3896
Steve Block44f0eee2011-05-26 01:26:41 +01003897 // Test for NaN. Sadly, we can't just compare to factory->nan_value(),
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003898 // so we do the second best thing - test it ourselves.
3899 // Note: if cc_ != equal, never_nan_nan_ is not used.
3900 if (never_nan_nan_ && (cc_ == equal)) {
3901 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
3902 __ ret(0);
3903 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00003904 Label heap_number;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003905 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
Steve Block44f0eee2011-05-26 01:26:41 +01003906 Immediate(masm->isolate()->factory()->heap_number_map()));
Ben Murdoch257744e2011-11-30 15:57:28 +00003907 __ j(equal, &heap_number, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003908 if (cc_ != equal) {
3909 // Call runtime on identical JSObjects. Otherwise return equal.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003910 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003911 __ j(above_equal, &not_identical);
3912 }
3913 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
3914 __ ret(0);
3915
3916 __ bind(&heap_number);
3917 // It is a heap number, so return non-equal if it's NaN and equal if
3918 // it's not NaN.
3919 // The representation of NaN values has all exponent bits (52..62) set,
3920 // and not all mantissa bits (0..51) clear.
3921 // We only accept QNaNs, which have bit 51 set.
3922 // Read top bits of double representation (second word of value).
3923
3924 // Value is a QNaN if value & kQuietNaNMask == kQuietNaNMask, i.e.,
3925 // all bits in the mask are set. We only need to check the word
3926 // that contains the exponent and high bit of the mantissa.
3927 STATIC_ASSERT(((kQuietNaNHighBitsMask << 1) & 0x80000000u) != 0);
3928 __ mov(edx, FieldOperand(edx, HeapNumber::kExponentOffset));
Steve Block9fac8402011-05-12 15:51:54 +01003929 __ Set(eax, Immediate(0));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003930 // Shift value and mask so kQuietNaNHighBitsMask applies to topmost
3931 // bits.
3932 __ add(edx, Operand(edx));
3933 __ cmp(edx, kQuietNaNHighBitsMask << 1);
3934 if (cc_ == equal) {
3935 STATIC_ASSERT(EQUAL != 1);
3936 __ setcc(above_equal, eax);
3937 __ ret(0);
3938 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00003939 Label nan;
3940 __ j(above_equal, &nan, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003941 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
3942 __ ret(0);
3943 __ bind(&nan);
3944 __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
3945 __ ret(0);
3946 }
3947 }
3948
3949 __ bind(&not_identical);
3950 }
3951
3952 // Strict equality can quickly decide whether objects are equal.
3953 // Non-strict object equality is slower, so it is handled later in the stub.
3954 if (cc_ == equal && strict_) {
3955 Label slow; // Fallthrough label.
Ben Murdoch257744e2011-11-30 15:57:28 +00003956 Label not_smis;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003957 // If we're doing a strict equality comparison, we don't have to do
3958 // type conversion, so we generate code to do fast comparison for objects
3959 // and oddballs. Non-smi numbers and strings still go through the usual
3960 // slow-case code.
3961 // If either is a Smi (we know that not both are), then they can only
3962 // be equal if the other is a HeapNumber. If so, use the slow case.
3963 STATIC_ASSERT(kSmiTag == 0);
3964 ASSERT_EQ(0, Smi::FromInt(0));
3965 __ mov(ecx, Immediate(kSmiTagMask));
3966 __ and_(ecx, Operand(eax));
3967 __ test(ecx, Operand(edx));
Ben Murdoch257744e2011-11-30 15:57:28 +00003968 __ j(not_zero, &not_smis, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003969 // One operand is a smi.
3970
3971 // Check whether the non-smi is a heap number.
3972 STATIC_ASSERT(kSmiTagMask == 1);
3973 // ecx still holds eax & kSmiTag, which is either zero or one.
3974 __ sub(Operand(ecx), Immediate(0x01));
3975 __ mov(ebx, edx);
3976 __ xor_(ebx, Operand(eax));
3977 __ and_(ebx, Operand(ecx)); // ebx holds either 0 or eax ^ edx.
3978 __ xor_(ebx, Operand(eax));
3979 // if eax was smi, ebx is now edx, else eax.
3980
3981 // Check if the non-smi operand is a heap number.
3982 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
Steve Block44f0eee2011-05-26 01:26:41 +01003983 Immediate(masm->isolate()->factory()->heap_number_map()));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003984 // If heap number, handle it in the slow case.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003985 __ j(equal, &slow, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003986 // Return non-equal (ebx is not zero)
3987 __ mov(eax, ebx);
3988 __ ret(0);
3989
3990 __ bind(&not_smis);
3991 // If either operand is a JSObject or an oddball value, then they are not
3992 // equal since their pointers are different
3993 // There is no test for undetectability in strict equality.
3994
3995 // Get the type of the first operand.
3996 // If the first object is a JS object, we have done pointer comparison.
Ben Murdoch257744e2011-11-30 15:57:28 +00003997 Label first_non_object;
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003998 STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
3999 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
Ben Murdoch257744e2011-11-30 15:57:28 +00004000 __ j(below, &first_non_object, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004001
4002 // Return non-zero (eax is not zero)
Ben Murdoch257744e2011-11-30 15:57:28 +00004003 Label return_not_equal;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004004 STATIC_ASSERT(kHeapObjectTag != 0);
4005 __ bind(&return_not_equal);
4006 __ ret(0);
4007
4008 __ bind(&first_non_object);
4009 // Check for oddballs: true, false, null, undefined.
4010 __ CmpInstanceType(ecx, ODDBALL_TYPE);
4011 __ j(equal, &return_not_equal);
4012
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004013 __ CmpObjectType(edx, FIRST_SPEC_OBJECT_TYPE, ecx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004014 __ j(above_equal, &return_not_equal);
4015
4016 // Check for oddballs: true, false, null, undefined.
4017 __ CmpInstanceType(ecx, ODDBALL_TYPE);
4018 __ j(equal, &return_not_equal);
4019
4020 // Fall through to the general case.
4021 __ bind(&slow);
4022 }
4023
4024 // Generate the number comparison code.
4025 if (include_number_compare_) {
4026 Label non_number_comparison;
4027 Label unordered;
Ben Murdoch8b112d22011-06-08 16:22:53 +01004028 if (CpuFeatures::IsSupported(SSE2)) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004029 CpuFeatures::Scope use_sse2(SSE2);
4030 CpuFeatures::Scope use_cmov(CMOV);
4031
4032 FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison);
4033 __ ucomisd(xmm0, xmm1);
4034
4035 // Don't base result on EFLAGS when a NaN is involved.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00004036 __ j(parity_even, &unordered, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004037 // Return a result of -1, 0, or 1, based on EFLAGS.
4038 __ mov(eax, 0); // equal
4039 __ mov(ecx, Immediate(Smi::FromInt(1)));
4040 __ cmov(above, eax, Operand(ecx));
4041 __ mov(ecx, Immediate(Smi::FromInt(-1)));
4042 __ cmov(below, eax, Operand(ecx));
4043 __ ret(0);
4044 } else {
4045 FloatingPointHelper::CheckFloatOperands(
4046 masm, &non_number_comparison, ebx);
4047 FloatingPointHelper::LoadFloatOperand(masm, eax);
4048 FloatingPointHelper::LoadFloatOperand(masm, edx);
4049 __ FCmp();
4050
4051 // Don't base result on EFLAGS when a NaN is involved.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00004052 __ j(parity_even, &unordered, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004053
Ben Murdoch257744e2011-11-30 15:57:28 +00004054 Label below_label, above_label;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004055 // Return a result of -1, 0, or 1, based on EFLAGS.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00004056 __ j(below, &below_label, Label::kNear);
4057 __ j(above, &above_label, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004058
Steve Block9fac8402011-05-12 15:51:54 +01004059 __ Set(eax, Immediate(0));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004060 __ ret(0);
4061
4062 __ bind(&below_label);
4063 __ mov(eax, Immediate(Smi::FromInt(-1)));
4064 __ ret(0);
4065
4066 __ bind(&above_label);
4067 __ mov(eax, Immediate(Smi::FromInt(1)));
4068 __ ret(0);
4069 }
4070
4071 // If one of the numbers was NaN, then the result is always false.
4072 // The cc is never not-equal.
4073 __ bind(&unordered);
4074 ASSERT(cc_ != not_equal);
4075 if (cc_ == less || cc_ == less_equal) {
4076 __ mov(eax, Immediate(Smi::FromInt(1)));
4077 } else {
4078 __ mov(eax, Immediate(Smi::FromInt(-1)));
4079 }
4080 __ ret(0);
4081
4082 // The number comparison code did not provide a valid result.
4083 __ bind(&non_number_comparison);
4084 }
4085
4086 // Fast negative check for symbol-to-symbol equality.
4087 Label check_for_strings;
4088 if (cc_ == equal) {
4089 BranchIfNonSymbol(masm, &check_for_strings, eax, ecx);
4090 BranchIfNonSymbol(masm, &check_for_strings, edx, ecx);
4091
4092 // We've already checked for object identity, so if both operands
4093 // are symbols they aren't equal. Register eax already holds a
4094 // non-zero value, which indicates not equal, so just return.
4095 __ ret(0);
4096 }
4097
4098 __ bind(&check_for_strings);
4099
4100 __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx,
4101 &check_unequal_objects);
4102
4103 // Inline comparison of ascii strings.
Ben Murdoch257744e2011-11-30 15:57:28 +00004104 if (cc_ == equal) {
4105 StringCompareStub::GenerateFlatAsciiStringEquals(masm,
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004106 edx,
4107 eax,
4108 ecx,
Ben Murdoch257744e2011-11-30 15:57:28 +00004109 ebx);
4110 } else {
4111 StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
4112 edx,
4113 eax,
4114 ecx,
4115 ebx,
4116 edi);
4117 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004118#ifdef DEBUG
4119 __ Abort("Unexpected fall-through from string comparison");
4120#endif
4121
4122 __ bind(&check_unequal_objects);
4123 if (cc_ == equal && !strict_) {
4124 // Non-strict equality. Objects are unequal if
4125 // they are both JSObjects and not undetectable,
4126 // and their pointers are different.
Ben Murdoch257744e2011-11-30 15:57:28 +00004127 Label not_both_objects;
4128 Label return_unequal;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004129 // At most one is a smi, so we can test for smi by adding the two.
4130 // A smi plus a heap object has the low bit set, a heap object plus
4131 // a heap object has the low bit clear.
4132 STATIC_ASSERT(kSmiTag == 0);
4133 STATIC_ASSERT(kSmiTagMask == 1);
4134 __ lea(ecx, Operand(eax, edx, times_1, 0));
4135 __ test(ecx, Immediate(kSmiTagMask));
Ben Murdoch257744e2011-11-30 15:57:28 +00004136 __ j(not_zero, &not_both_objects, Label::kNear);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004137 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
Ben Murdoch257744e2011-11-30 15:57:28 +00004138 __ j(below, &not_both_objects, Label::kNear);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004139 __ CmpObjectType(edx, FIRST_SPEC_OBJECT_TYPE, ebx);
Ben Murdoch257744e2011-11-30 15:57:28 +00004140 __ j(below, &not_both_objects, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004141 // We do not bail out after this point. Both are JSObjects, and
4142 // they are equal if and only if both are undetectable.
4143 // The and of the undetectable flags is 1 if and only if they are equal.
4144 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
4145 1 << Map::kIsUndetectable);
Ben Murdoch257744e2011-11-30 15:57:28 +00004146 __ j(zero, &return_unequal, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004147 __ test_b(FieldOperand(ebx, Map::kBitFieldOffset),
4148 1 << Map::kIsUndetectable);
Ben Murdoch257744e2011-11-30 15:57:28 +00004149 __ j(zero, &return_unequal, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004150 // The objects are both undetectable, so they both compare as the value
4151 // undefined, and are equal.
4152 __ Set(eax, Immediate(EQUAL));
4153 __ bind(&return_unequal);
4154 // Return non-equal by returning the non-zero object pointer in eax,
4155 // or return equal if we fell through to here.
4156 __ ret(0); // rax, rdx were pushed
4157 __ bind(&not_both_objects);
4158 }
4159
4160 // Push arguments below the return address.
4161 __ pop(ecx);
4162 __ push(edx);
4163 __ push(eax);
4164
4165 // Figure out which native to call and setup the arguments.
4166 Builtins::JavaScript builtin;
4167 if (cc_ == equal) {
4168 builtin = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
4169 } else {
4170 builtin = Builtins::COMPARE;
4171 __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
4172 }
4173
4174 // Restore return address on the stack.
4175 __ push(ecx);
4176
4177 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
4178 // tagged as a small integer.
4179 __ InvokeBuiltin(builtin, JUMP_FUNCTION);
4180}
4181
4182
// Jumps to |label| unless |object| is a symbol (an interned string with
// the symbol bit set in its instance type).  |scratch| is clobbered;
// |object| is preserved.
void CompareStub::BranchIfNonSymbol(MacroAssembler* masm,
                                    Label* label,
                                    Register object,
                                    Register scratch) {
  // Smis are not heap objects, hence not symbols.
  __ JumpIfSmi(object, label);
  __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  // Keep only the string/symbol bits, then require "is string" + "is symbol".
  __ and_(scratch, kIsSymbolMask | kIsNotStringMask);
  __ cmp(scratch, kSymbolTag | kStringTag);
  __ j(not_equal, label);
}
4194
4195
// Stack-overflow/interrupt check stub: simply defers to the StackGuard
// runtime function, which handles interrupts and stack growth.
void StackCheckStub::Generate(MacroAssembler* masm) {
  __ TailCallRuntime(Runtime::kStackGuard, 0, 1);
}
4199
4200
// Generic call stub: invokes the function found on the stack with argc_
// arguments.  Handles the implicit-global-receiver case (receiver passed
// as the hole), dispatches to the function with the appropriate call kind,
// and falls back to CALL_NON_FUNCTION via the arguments adaptor when the
// callee is not a JSFunction.
void CallFunctionStub::Generate(MacroAssembler* masm) {
  Label slow;

  // The receiver might implicitly be the global object. This is
  // indicated by passing the hole as the receiver to the call
  // function stub.
  if (ReceiverMightBeImplicit()) {
    Label call;
    // Get the receiver from the stack.
    // +1 ~ return address
    __ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize));
    // Call as function is indicated with the hole.
    __ cmp(eax, masm->isolate()->factory()->the_hole_value());
    __ j(not_equal, &call, Label::kNear);
    // Patch the receiver on the stack with the global receiver object.
    __ mov(ebx, GlobalObjectOperand());
    __ mov(ebx, FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));
    __ mov(Operand(esp, (argc_ + 1) * kPointerSize), ebx);
    __ bind(&call);
  }

  // Get the function to call from the stack.
  // +2 ~ receiver, return address
  __ mov(edi, Operand(esp, (argc_ + 2) * kPointerSize));

  // Check that the function really is a JavaScript function.
  __ JumpIfSmi(edi, &slow);
  // Goto slow case if we do not have a function.
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
  __ j(not_equal, &slow);

  // Fast-case: Just invoke the function.
  ParameterCount actual(argc_);

  if (ReceiverMightBeImplicit()) {
    Label call_as_function;
    // eax still holds the receiver loaded above; the hole means the
    // receiver was implicit, so invoke with CALL_AS_FUNCTION semantics.
    __ cmp(eax, masm->isolate()->factory()->the_hole_value());
    __ j(equal, &call_as_function);
    __ InvokeFunction(edi,
                      actual,
                      JUMP_FUNCTION,
                      NullCallWrapper(),
                      CALL_AS_METHOD);
    __ bind(&call_as_function);
  }
  __ InvokeFunction(edi,
                    actual,
                    JUMP_FUNCTION,
                    NullCallWrapper(),
                    CALL_AS_FUNCTION);

  // Slow-case: Non-function called.
  __ bind(&slow);
  // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
  // of the original receiver from the call site).
  __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi);
  __ Set(eax, Immediate(argc_));
  __ Set(ebx, Immediate(0));
  __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
  // Jump through the arguments adaptor so the builtin sees a well-formed
  // frame regardless of argc_.
  Handle<Code> adaptor =
      masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
  __ SetCallKind(ecx, CALL_AS_METHOD);
  __ jmp(adaptor, RelocInfo::CODE_TARGET);
}
4265
4266
// On ia32 the CEntry stub code may be moved by the GC; no immovable
// placement is required.
bool CEntryStub::NeedsImmovableCode() {
  return false;
}
4270
4271
// Throws the exception value held in eax using the macro-assembler's
// generic throw sequence (unwinds to the nearest JS try handler).
void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
  __ Throw(eax);
}
4275
4276
// Emits one attempt at calling the C (runtime) function from inside an
// exit frame, with optional preceding GC and optional always-allocate
// scope.  On success the exit frame is left and the stub returns to JS;
// on failure control transfers to one of the throw_* labels or falls
// through at the retry label so the caller can emit another attempt.
void CEntryStub::GenerateCore(MacroAssembler* masm,
                              Label* throw_normal_exception,
                              Label* throw_termination_exception,
                              Label* throw_out_of_memory_exception,
                              bool do_gc,
                              bool always_allocate_scope) {
  // eax: result parameter for PerformGC, if any
  // ebx: pointer to C function (C callee-saved)
  // ebp: frame pointer (restored after C call)
  // esp: stack pointer (restored after C call)
  // edi: number of arguments including receiver (C callee-saved)
  // esi: pointer to the first argument (C callee-saved)

  // Result returned in eax, or eax+edx if result_size_ is 2.

  // Check stack alignment.
  if (FLAG_debug_code) {
    __ CheckStackAlignment();
  }

  if (do_gc) {
    // Pass failure code returned from last attempt as first argument to
    // PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the
    // stack alignment is known to be correct. This function takes one argument
    // which is passed on the stack, and we know that the stack has been
    // prepared to pass at least one argument.
    __ mov(Operand(esp, 0 * kPointerSize), eax);  // Result.
    __ call(FUNCTION_ADDR(Runtime::PerformGC), RelocInfo::RUNTIME_ENTRY);
  }

  ExternalReference scope_depth =
      ExternalReference::heap_always_allocate_scope_depth(masm->isolate());
  if (always_allocate_scope) {
    // Bracket the C call with inc/dec of the always-allocate scope depth.
    __ inc(Operand::StaticVariable(scope_depth));
  }

  // Call C function with (argc, argv, isolate) on the stack.
  __ mov(Operand(esp, 0 * kPointerSize), edi);  // argc.
  __ mov(Operand(esp, 1 * kPointerSize), esi);  // argv.
  __ mov(Operand(esp, 2 * kPointerSize),
         Immediate(ExternalReference::isolate_address()));
  __ call(Operand(ebx));
  // Result is in eax or edx:eax - do not destroy these registers!

  if (always_allocate_scope) {
    __ dec(Operand::StaticVariable(scope_depth));
  }

  // Make sure we're not trying to return 'the hole' from the runtime
  // call as this may lead to crashes in the IC code later.
  if (FLAG_debug_code) {
    Label okay;
    __ cmp(eax, masm->isolate()->factory()->the_hole_value());
    __ j(not_equal, &okay, Label::kNear);
    __ int3();
    __ bind(&okay);
  }

  // Check for failure result.
  Label failure_returned;
  STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
  __ lea(ecx, Operand(eax, 1));
  // Lower 2 bits of ecx are 0 iff eax has failure tag.
  __ test(ecx, Immediate(kFailureTagMask));
  __ j(zero, &failure_returned);

  ExternalReference pending_exception_address(
      Isolate::k_pending_exception_address, masm->isolate());

  // Check that there is no pending exception, otherwise we
  // should have returned some failure value.
  if (FLAG_debug_code) {
    __ push(edx);
    __ mov(edx, Operand::StaticVariable(
        ExternalReference::the_hole_value_location(masm->isolate())));
    Label okay;
    __ cmp(edx, Operand::StaticVariable(pending_exception_address));
    // Cannot use check here as it attempts to generate call into runtime.
    __ j(equal, &okay, Label::kNear);
    __ int3();
    __ bind(&okay);
    __ pop(edx);
  }

  // Exit the JavaScript to C++ exit frame.
  __ LeaveExitFrame(save_doubles_);
  __ ret(0);

  // Handling of failure.
  __ bind(&failure_returned);

  Label retry;
  // If the returned exception is RETRY_AFTER_GC continue at retry label
  STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
  __ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
  __ j(zero, &retry, Label::kNear);

  // Special handling of out of memory exceptions.
  __ cmp(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
  __ j(equal, throw_out_of_memory_exception);

  // Retrieve the pending exception and clear the variable (reset it to
  // the hole value).
  ExternalReference the_hole_location =
      ExternalReference::the_hole_value_location(masm->isolate());
  __ mov(eax, Operand::StaticVariable(pending_exception_address));
  __ mov(edx, Operand::StaticVariable(the_hole_location));
  __ mov(Operand::StaticVariable(pending_exception_address), edx);

  // Special handling of termination exceptions which are uncatchable
  // by javascript code.
  __ cmp(eax, masm->isolate()->factory()->termination_exception());
  __ j(equal, throw_termination_exception);

  // Handle normal exception.
  __ jmp(throw_normal_exception);

  // Retry.
  __ bind(&retry);
}
4396
4397
// Throws an uncatchable exception (out-of-memory or termination) of the
// given |type|, with the exception value in eax; unwinds past JS handlers.
void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
                                          UncatchableExceptionType type) {
  __ ThrowUncatchable(type, eax);
}
4402
4403
// Entry stub from JavaScript into the C++ runtime.  Sets up an exit
// frame, then attempts the runtime call up to three times: first
// directly, then after a space-specific GC, and finally after a full GC
// with an always-allocate scope.  Failures that survive all attempts are
// dispatched to the appropriate throw sequence.
void CEntryStub::Generate(MacroAssembler* masm) {
  // eax: number of arguments including receiver
  // ebx: pointer to C function (C callee-saved)
  // ebp: frame pointer (restored after C call)
  // esp: stack pointer (restored after C call)
  // esi: current context (C callee-saved)
  // edi: JS function of the caller (C callee-saved)

  // NOTE: Invocations of builtins may return failure objects instead
  // of a proper result. The builtin entry handles this by performing
  // a garbage collection and retrying the builtin (twice).

  // Enter the exit frame that transitions from JavaScript to C++.
  __ EnterExitFrame(save_doubles_);

  // eax: result parameter for PerformGC, if any (setup below)
  // ebx: pointer to builtin function (C callee-saved)
  // ebp: frame pointer (restored after C call)
  // esp: stack pointer (restored after C call)
  // edi: number of arguments including receiver (C callee-saved)
  // esi: argv pointer (C callee-saved)

  Label throw_normal_exception;
  Label throw_termination_exception;
  Label throw_out_of_memory_exception;

  // Attempt 1: call into the runtime system directly.
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,
               false,
               false);

  // Attempt 2: do space-specific GC and retry runtime call.
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,
               true,
               false);

  // Attempt 3: do full GC and retry runtime call one final time.
  Failure* failure = Failure::InternalError();
  __ mov(eax, Immediate(reinterpret_cast<int32_t>(failure)));
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,
               true,
               true);

  __ bind(&throw_out_of_memory_exception);
  GenerateThrowUncatchable(masm, OUT_OF_MEMORY);

  __ bind(&throw_termination_exception);
  GenerateThrowUncatchable(masm, TERMINATION);

  __ bind(&throw_normal_exception);
  GenerateThrowTOS(masm);
}
4465
4466
// Generates the JS entry stub body: builds an ENTRY (or ENTRY_CONSTRUCT)
// frame, saves C callee-saved registers and the previous c_entry_fp,
// records js_entry_sp for the outermost JS call, installs a JS_ENTRY try
// handler, and invokes the JS entry (or construct entry) trampoline
// builtin. On a caught exception it stores the exception into the
// isolate's pending-exception slot and returns a failure sentinel in eax.
void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
  Label invoke, exit;
  Label not_outermost_js, not_outermost_js_2;

  // Setup frame.
  __ push(ebp);
  __ mov(ebp, Operand(esp));

  // Push marker in two places.
  int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
  __ push(Immediate(Smi::FromInt(marker)));  // context slot
  __ push(Immediate(Smi::FromInt(marker)));  // function slot
  // Save callee-saved registers (C calling conventions).
  __ push(edi);
  __ push(esi);
  __ push(ebx);

  // Save copies of the top frame descriptor on the stack.
  ExternalReference c_entry_fp(Isolate::k_c_entry_fp_address, masm->isolate());
  __ push(Operand::StaticVariable(c_entry_fp));

  // If this is the outermost JS call, set js_entry_sp value (a zero
  // js_entry_sp means no outer JS entry frame exists yet).
  ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address,
                                masm->isolate());
  __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
  __ j(not_equal, &not_outermost_js, Label::kNear);
  __ mov(Operand::StaticVariable(js_entry_sp), ebp);
  __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
  Label cont;
  __ jmp(&cont, Label::kNear);
  __ bind(&not_outermost_js);
  __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
  __ bind(&cont);

  // Call a faked try-block that does the invoke.
  __ call(&invoke);

  // Caught exception: Store result (exception) in the pending
  // exception field in the JSEnv and return a failure sentinel.
  ExternalReference pending_exception(Isolate::k_pending_exception_address,
                                      masm->isolate());
  __ mov(Operand::StaticVariable(pending_exception), eax);
  __ mov(eax, reinterpret_cast<int32_t>(Failure::Exception()));
  __ jmp(&exit);

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);

  // Clear any pending exceptions by storing the hole value into the
  // pending-exception slot.
  ExternalReference the_hole_location =
      ExternalReference::the_hole_value_location(masm->isolate());
  __ mov(edx, Operand::StaticVariable(the_hole_location));
  __ mov(Operand::StaticVariable(pending_exception), edx);

  // Fake a receiver (NULL).
  __ push(Immediate(0));  // receiver

  // Invoke the function by calling through JS entry trampoline
  // builtin and pop the faked function when we return. Notice that we
  // cannot store a reference to the trampoline code directly in this
  // stub, because the builtin stubs may not have been generated yet.
  if (is_construct) {
    ExternalReference construct_entry(
        Builtins::kJSConstructEntryTrampoline,
        masm->isolate());
    __ mov(edx, Immediate(construct_entry));
  } else {
    ExternalReference entry(Builtins::kJSEntryTrampoline,
                            masm->isolate());
    __ mov(edx, Immediate(entry));
  }
  __ mov(edx, Operand(edx, 0));  // deref address
  __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
  __ call(Operand(edx));

  // Unlink this frame from the handler chain.
  __ PopTryHandler();

  __ bind(&exit);
  // Check if the current stack frame is marked as the outermost JS frame;
  // if so, clear js_entry_sp so the next entry is again "outermost".
  __ pop(ebx);
  __ cmp(Operand(ebx),
         Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
  __ j(not_equal, &not_outermost_js_2);
  __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
  __ bind(&not_outermost_js_2);

  // Restore the top frame descriptor from the stack.
  __ pop(Operand::StaticVariable(ExternalReference(
      Isolate::k_c_entry_fp_address,
      masm->isolate())));

  // Restore callee-saved registers (C calling conventions).
  __ pop(ebx);
  __ pop(esi);
  __ pop(edi);
  __ add(Operand(esp), Immediate(2 * kPointerSize));  // remove markers

  // Restore frame pointer and return.
  __ pop(ebp);
  __ ret(0);
}
4570
4571
Ben Murdoch086aeea2011-05-13 15:57:08 +01004572// Generate stub code for instanceof.
4573// This code can patch a call site inlined cache of the instance of check,
4574// which looks like this.
4575//
4576// 81 ff XX XX XX XX cmp edi, <the hole, patched to a map>
4577// 75 0a jne <some near label>
4578// b8 XX XX XX XX mov eax, <the hole, patched to either true or false>
4579//
4580// If call site patching is requested the stack will have the delta from the
4581// return address to the cmp instruction just below the return address. This
4582// also means that call site patching can only take place with arguments in
4583// registers. TOS looks like this when call site patching is requested
4584//
4585// esp[0] : return address
4586// esp[4] : delta from return address to cmp instruction
4587//
// Generates the instanceof stub (see the comment block above for the
// call-site patching layout). Result convention in eax on the fast paths:
// 0 means "is an instance", Smi::FromInt(1) means "is not an instance" —
// the slow ReturnTrueFalseObject() path below converts 0/non-0 into the
// true/false heap objects.
void InstanceofStub::Generate(MacroAssembler* masm) {
  // Call site inlining and patching implies arguments in registers.
  ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck());

  // Fixed register usage throughout the stub.
  Register object = eax;  // Object (lhs).
  Register map = ebx;  // Map of the object.
  Register function = edx;  // Function (rhs).
  Register prototype = edi;  // Prototype of the function.
  Register scratch = ecx;

  // Constants describing the call site code to patch. The byte offsets
  // and opcode bytes must match the cmp/mov sequence documented above
  // this function.
  static const int kDeltaToCmpImmediate = 2;
  static const int kDeltaToMov = 8;
  static const int kDeltaToMovImmediate = 9;
  static const int8_t kCmpEdiImmediateByte1 = BitCast<int8_t, uint8_t>(0x81);
  static const int8_t kCmpEdiImmediateByte2 = BitCast<int8_t, uint8_t>(0xff);
  static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8);

  ExternalReference roots_address =
      ExternalReference::roots_address(masm->isolate());

  // The fixed registers must agree with the stub's declared argument
  // registers (see InstanceofStub::left()/right() below).
  ASSERT_EQ(object.code(), InstanceofStub::left().code());
  ASSERT_EQ(function.code(), InstanceofStub::right().code());

  // Get the object and function - they are always both needed.
  Label slow, not_js_object;
  if (!HasArgsInRegisters()) {
    __ mov(object, Operand(esp, 2 * kPointerSize));
    __ mov(function, Operand(esp, 1 * kPointerSize));
  }

  // Check that the left hand is a JS object.
  __ JumpIfSmi(object, &not_js_object);
  __ IsObjectJSObjectType(object, map, scratch, &not_js_object);

  // If there is a call site cache don't look in the global cache, but do the
  // real lookup and update the call site cache.
  if (!HasCallSiteInlineCheck()) {
    // Look up the function and the map in the instanceof cache.
    Label miss;
    __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
    __ cmp(function,
           Operand::StaticArray(scratch, times_pointer_size, roots_address));
    __ j(not_equal, &miss, Label::kNear);
    __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex));
    __ cmp(map, Operand::StaticArray(
        scratch, times_pointer_size, roots_address));
    __ j(not_equal, &miss, Label::kNear);
    // Cache hit: return the cached answer.
    __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
    __ mov(eax, Operand::StaticArray(
        scratch, times_pointer_size, roots_address));
    __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
    __ bind(&miss);
  }

  // Get the prototype of the function.
  __ TryGetFunctionPrototype(function, prototype, scratch, &slow);

  // Check that the function prototype is a JS object.
  __ JumpIfSmi(prototype, &slow);
  __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);

  // Update the global instanceof or call site inlined cache with the current
  // map and function. The cached answer will be set when it is known below.
  if (!HasCallSiteInlineCheck()) {
    __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex));
    __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), map);
    __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
    __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address),
           function);
  } else {
    // The constants for the code patching are based on no push instructions
    // at the call site.
    ASSERT(HasArgsInRegisters());
    // Get return address and delta to inlined map check.
    __ mov(scratch, Operand(esp, 0 * kPointerSize));
    __ sub(scratch, Operand(esp, 1 * kPointerSize));
    if (FLAG_debug_code) {
      // Verify the patch target really is the expected cmp instruction.
      __ cmpb(Operand(scratch, 0), kCmpEdiImmediateByte1);
      __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 1)");
      __ cmpb(Operand(scratch, 1), kCmpEdiImmediateByte2);
      __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 2)");
    }
    // Patch the cmp immediate at the call site with the object's map.
    __ mov(Operand(scratch, kDeltaToCmpImmediate), map);
  }

  // Loop through the prototype chain of the object looking for the function
  // prototype.
  __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset));
  Label loop, is_instance, is_not_instance;
  __ bind(&loop);
  __ cmp(scratch, Operand(prototype));
  __ j(equal, &is_instance, Label::kNear);
  Factory* factory = masm->isolate()->factory();
  __ cmp(Operand(scratch), Immediate(factory->null_value()));
  __ j(equal, &is_not_instance, Label::kNear);
  __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
  __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset));
  __ jmp(&loop);

  __ bind(&is_instance);
  if (!HasCallSiteInlineCheck()) {
    // eax = 0 signals "is an instance"; also cache the answer.
    __ Set(eax, Immediate(0));
    __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
    __ mov(Operand::StaticArray(scratch,
                                times_pointer_size, roots_address), eax);
  } else {
    // Get return address and delta to inlined map check, then patch the
    // mov immediate at the call site with the true object.
    __ mov(eax, factory->true_value());
    __ mov(scratch, Operand(esp, 0 * kPointerSize));
    __ sub(scratch, Operand(esp, 1 * kPointerSize));
    if (FLAG_debug_code) {
      __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
      __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
    }
    __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
    if (!ReturnTrueFalseObject()) {
      __ Set(eax, Immediate(0));
    }
  }
  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);

  __ bind(&is_not_instance);
  if (!HasCallSiteInlineCheck()) {
    // eax = Smi(1) signals "is not an instance"; also cache the answer.
    __ Set(eax, Immediate(Smi::FromInt(1)));
    __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
    __ mov(Operand::StaticArray(
        scratch, times_pointer_size, roots_address), eax);
  } else {
    // Get return address and delta to inlined map check, then patch the
    // mov immediate at the call site with the false object.
    __ mov(eax, factory->false_value());
    __ mov(scratch, Operand(esp, 0 * kPointerSize));
    __ sub(scratch, Operand(esp, 1 * kPointerSize));
    if (FLAG_debug_code) {
      __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
      __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
    }
    __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
    if (!ReturnTrueFalseObject()) {
      __ Set(eax, Immediate(Smi::FromInt(1)));
    }
  }
  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);

  Label object_not_null, object_not_null_or_smi;
  __ bind(&not_js_object);
  // Before null, smi and string value checks, check that the rhs is a function
  // as for a non-function rhs an exception needs to be thrown.
  __ JumpIfSmi(function, &slow, Label::kNear);
  __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch);
  __ j(not_equal, &slow, Label::kNear);

  // Null is not instance of anything.
  __ cmp(object, factory->null_value());
  __ j(not_equal, &object_not_null, Label::kNear);
  __ Set(eax, Immediate(Smi::FromInt(1)));
  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);

  __ bind(&object_not_null);
  // Smi values is not instance of anything.
  __ JumpIfNotSmi(object, &object_not_null_or_smi, Label::kNear);
  __ Set(eax, Immediate(Smi::FromInt(1)));
  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);

  __ bind(&object_not_null_or_smi);
  // String values is not instance of anything.
  Condition is_string = masm->IsObjectStringType(object, scratch, scratch);
  __ j(NegateCondition(is_string), &slow, Label::kNear);
  __ Set(eax, Immediate(Smi::FromInt(1)));
  __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);

  // Slow-case: Go through the JavaScript implementation.
  __ bind(&slow);
  if (!ReturnTrueFalseObject()) {
    // Tail call the builtin which returns 0 or 1.
    if (HasArgsInRegisters()) {
      // Push arguments below return address.
      __ pop(scratch);
      __ push(object);
      __ push(function);
      __ push(scratch);
    }
    __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
  } else {
    // Call the builtin and convert 0/1 to true/false.
    __ EnterInternalFrame();
    __ push(object);
    __ push(function);
    __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
    __ LeaveInternalFrame();
    Label true_value, done;
    __ test(eax, Operand(eax));
    __ j(zero, &true_value, Label::kNear);  // 0 means "is an instance".
    __ mov(eax, factory->false_value());
    __ jmp(&done, Label::kNear);
    __ bind(&true_value);
    __ mov(eax, factory->true_value());
    __ bind(&done);
    __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
  }
}
4790
4791
Ben Murdoch086aeea2011-05-13 15:57:08 +01004792Register InstanceofStub::left() { return eax; }
4793
4794
4795Register InstanceofStub::right() { return edx; }
4796
4797
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004798int CompareStub::MinorKey() {
4799 // Encode the three parameters in a unique 16 bit value. To avoid duplicate
4800 // stubs the never NaN NaN condition is only taken into account if the
4801 // condition is equals.
4802 ASSERT(static_cast<unsigned>(cc_) < (1 << 12));
4803 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
4804 return ConditionField::encode(static_cast<unsigned>(cc_))
4805 | RegisterField::encode(false) // lhs_ and rhs_ are not used
4806 | StrictField::encode(strict_)
4807 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false)
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004808 | IncludeNumberCompareField::encode(include_number_compare_)
4809 | IncludeSmiCompareField::encode(include_smi_compare_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004810}
4811
4812
4813// Unfortunately you have to run without snapshots to see most of these
4814// names in the profile since most compare stubs end up in the snapshot.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004815void CompareStub::PrintName(StringStream* stream) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004816 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004817 const char* cc_name;
4818 switch (cc_) {
4819 case less: cc_name = "LT"; break;
4820 case greater: cc_name = "GT"; break;
4821 case less_equal: cc_name = "LE"; break;
4822 case greater_equal: cc_name = "GE"; break;
4823 case equal: cc_name = "EQ"; break;
4824 case not_equal: cc_name = "NE"; break;
4825 default: cc_name = "UnknownCondition"; break;
4826 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004827 bool is_equality = cc_ == equal || cc_ == not_equal;
4828 stream->Add("CompareStub_%s", cc_name);
4829 if (strict_ && is_equality) stream->Add("_STRICT");
4830 if (never_nan_nan_ && is_equality) stream->Add("_NO_NAN");
4831 if (!include_number_compare_) stream->Add("_NO_NUMBER");
4832 if (!include_smi_compare_) stream->Add("_NO_SMI");
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004833}
4834
4835
4836// -------------------------------------------------------------------------
4837// StringCharCodeAtGenerator
4838
// Fast path for String.charCodeAt-style access: loads the character code
// at index_ of string object_ into result_ (smi-tagged). Handles
// sequential, cons (with empty second part) and sliced strings inline;
// bails out to the member labels (receiver_not_string_, index_not_smi_,
// index_out_of_range_, call_runtime_) for everything else. Falls through
// to exit_ on success.
void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
  Label flat_string;
  Label ascii_string;
  Label got_char_code;
  Label sliced_string;

  // If the receiver is a smi trigger the non-string case.
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfSmi(object_, receiver_not_string_);

  // Fetch the instance type of the receiver into result register.
  __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
  __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
  // If the receiver is not a string trigger the non-string case.
  __ test(result_, Immediate(kIsNotStringMask));
  __ j(not_zero, receiver_not_string_);

  // If the index is non-smi trigger the non-smi case.
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfNotSmi(index_, &index_not_smi_);

  // Put smi-tagged index into scratch register.
  __ mov(scratch_, index_);
  __ bind(&got_smi_index_);

  // Check for index out of range.
  __ cmp(scratch_, FieldOperand(object_, String::kLengthOffset));
  __ j(above_equal, index_out_of_range_);

  // We need special handling for non-flat strings.
  STATIC_ASSERT(kSeqStringTag == 0);
  __ test(result_, Immediate(kStringRepresentationMask));
  __ j(zero, &flat_string);

  // Handle non-flat strings: dispatch on the representation tag.
  __ and_(result_, kStringRepresentationMask);
  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
  __ cmp(result_, kExternalStringTag);
  __ j(greater, &sliced_string, Label::kNear);
  __ j(equal, &call_runtime_);  // External strings go to the runtime.

  // ConsString.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  Label assure_seq_string;
  __ cmp(FieldOperand(object_, ConsString::kSecondOffset),
         Immediate(masm->isolate()->factory()->empty_string()));
  __ j(not_equal, &call_runtime_);
  // Get the first of the two strings and load its instance type.
  __ mov(object_, FieldOperand(object_, ConsString::kFirstOffset));
  __ jmp(&assure_seq_string, Label::kNear);

  // SlicedString, unpack and add offset.
  __ bind(&sliced_string);
  __ add(scratch_, FieldOperand(object_, SlicedString::kOffsetOffset));
  __ mov(object_, FieldOperand(object_, SlicedString::kParentOffset));

  // Assure that we are dealing with a sequential string. Go to runtime if not.
  __ bind(&assure_seq_string);
  __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
  __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kSeqStringTag == 0);
  __ test(result_, Immediate(kStringRepresentationMask));
  __ j(not_zero, &call_runtime_);
  __ jmp(&flat_string, Label::kNear);

  // Check for 1-byte or 2-byte string.
  __ bind(&flat_string);
  STATIC_ASSERT(kAsciiStringTag != 0);
  __ test(result_, Immediate(kStringEncodingMask));
  __ j(not_zero, &ascii_string, Label::kNear);

  // 2-byte string.
  // Load the 2-byte character code into the result register.
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ movzx_w(result_, FieldOperand(object_,
                                   scratch_, times_1,  // Scratch is smi-tagged.
                                   SeqTwoByteString::kHeaderSize));
  __ jmp(&got_char_code, Label::kNear);

  // ASCII string.
  // Load the byte into the result register.
  __ bind(&ascii_string);
  __ SmiUntag(scratch_);
  __ movzx_b(result_, FieldOperand(object_,
                                   scratch_, times_1,
                                   SeqAsciiString::kHeaderSize));
  __ bind(&got_char_code);
  // Return the character code as a smi.
  __ SmiTag(result_);
  __ bind(&exit_);
}
4933
4934
// Slow path companion to GenerateFast above: converts a heap-number
// index to a smi via the runtime and re-enters the fast path, or calls
// Runtime::kStringCharCodeAt when the fast path could not extract the
// character inline. Must never be reached by fall-through (guarded by
// the leading Abort).
void StringCharCodeAtGenerator::GenerateSlow(
    MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
  __ Abort("Unexpected fallthrough to CharCodeAt slow case");

  // Index is not a smi.
  __ bind(&index_not_smi_);
  // If index is a heap number, try converting it to an integer.
  __ CheckMap(index_,
              masm->isolate()->factory()->heap_number_map(),
              index_not_number_,
              DONT_DO_SMI_CHECK);
  call_helper.BeforeCall(masm);
  // Preserve object_ and index_ across the runtime call.
  __ push(object_);
  __ push(index_);
  __ push(index_);  // Consumed by runtime conversion function.
  if (index_flags_ == STRING_INDEX_IS_NUMBER) {
    __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
  } else {
    ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
    // NumberToSmi discards numbers that are not exact integers.
    __ CallRuntime(Runtime::kNumberToSmi, 1);
  }
  if (!scratch_.is(eax)) {
    // Save the conversion result before the pop instructions below
    // have a chance to overwrite it.
    __ mov(scratch_, eax);
  }
  __ pop(index_);
  __ pop(object_);
  // Reload the instance type.
  __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
  __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
  call_helper.AfterCall(masm);
  // If index is still not a smi, it must be out of range.
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfNotSmi(scratch_, index_out_of_range_);
  // Otherwise, return to the fast path.
  __ jmp(&got_smi_index_);

  // Call runtime. We get here when the receiver is a string and the
  // index is a number, but the code of getting the actual character
  // is too complex (e.g., when the string needs to be flattened).
  __ bind(&call_runtime_);
  call_helper.BeforeCall(masm);
  __ push(object_);
  __ push(index_);
  __ CallRuntime(Runtime::kStringCharCodeAt, 2);
  if (!result_.is(eax)) {
    __ mov(result_, eax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort("Unexpected fallthrough from CharCodeAt slow case");
}
4990
4991
4992// -------------------------------------------------------------------------
4993// StringCharFromCodeGenerator
4994
// Fast path mapping a smi character code in code_ to a one-character
// string in result_, via the single-character string cache. Jumps to
// slow_case_ when the code is not a smi, exceeds kMaxAsciiCharCode, or
// the cache slot holds undefined (cache miss). Falls through to exit_
// on success.
void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
  // Fast case of Heap::LookupSingleCharacterStringFromCode.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiShiftSize == 0);
  ASSERT(IsPowerOf2(String::kMaxAsciiCharCode + 1));
  // One combined test: non-smi tag bits or out-of-range code bits set.
  __ test(code_,
          Immediate(kSmiTagMask |
                    ((~String::kMaxAsciiCharCode) << kSmiTagSize)));
  __ j(not_zero, &slow_case_);

  Factory* factory = masm->isolate()->factory();
  __ Set(result_, Immediate(factory->single_character_string_cache()));
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiShiftSize == 0);
  // At this point code register contains smi tagged ascii char code.
  // Index the cache: the smi tag accounts for the half-pointer scale.
  __ mov(result_, FieldOperand(result_,
                               code_, times_half_pointer_size,
                               FixedArray::kHeaderSize));
  // An undefined cache entry means the string is not cached — go slow.
  __ cmp(result_, factory->undefined_value());
  __ j(equal, &slow_case_);
  __ bind(&exit_);
}
5018
5019
// Slow path companion to GenerateFast above: calls Runtime::kCharFromCode
// to produce the one-character string, moves the result into result_ if
// needed, and rejoins the fast path at exit_. Must never be reached by
// fall-through (guarded by the leading Abort).
void StringCharFromCodeGenerator::GenerateSlow(
    MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
  __ Abort("Unexpected fallthrough to CharFromCode slow case");

  __ bind(&slow_case_);
  call_helper.BeforeCall(masm);
  __ push(code_);
  __ CallRuntime(Runtime::kCharFromCode, 1);
  if (!result_.is(eax)) {
    // Runtime calls return in eax; copy into the requested register.
    __ mov(result_, eax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort("Unexpected fallthrough from CharFromCode slow case");
}
5036
5037
5038// -------------------------------------------------------------------------
5039// StringCharAtGenerator
5040
// Fast path for String.charAt-style access: first extracts the character
// code at the index, then maps that code to a one-character string, by
// chaining the two sub-generators' fast paths.
void StringCharAtGenerator::GenerateFast(MacroAssembler* masm) {
  char_code_at_generator_.GenerateFast(masm);
  char_from_code_generator_.GenerateFast(masm);
}
5045
5046
// Slow path for String.charAt-style access: emits the slow cases of both
// sub-generators, in the same order as GenerateFast above.
void StringCharAtGenerator::GenerateSlow(
    MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
  char_code_at_generator_.GenerateSlow(masm, call_helper);
  char_from_code_generator_.GenerateSlow(masm, call_helper);
}
5052
5053
// Stub for string concatenation (the '+' operator when at least one operand
// may be a string).
//
// Stack on entry:
//   esp[0]: return address
//   esp[4]: second argument (right operand)
//   esp[8]: first argument (left operand)
//
// Fast paths handled here, in order:
//   1. Either string is empty -> return the other.
//   2. Combined length == 2   -> probe the symbol table, else allocate a
//      fresh two-character sequential ascii string.
//   3. Combined length >= kMinNonFlatLength -> allocate a cons string.
//   4. Otherwise -> allocate a flat sequential string (ascii or two-byte)
//      and copy both operands' characters into it.
// Anything else (externals, overflow, failed allocation) goes to the
// Runtime::kStringAdd runtime call, or to the ADD builtin when an operand
// conversion (GenerateConvertArgument) bailed out.
void StringAddStub::Generate(MacroAssembler* masm) {
  Label string_add_runtime, call_builtin;
  Builtins::JavaScript builtin_id = Builtins::ADD;

  // Load the two arguments.
  __ mov(eax, Operand(esp, 2 * kPointerSize));  // First argument.
  __ mov(edx, Operand(esp, 1 * kPointerSize));  // Second argument.

  // Make sure that both arguments are strings if not known in advance.
  if (flags_ == NO_STRING_ADD_FLAGS) {
    __ JumpIfSmi(eax, &string_add_runtime);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, ebx);
    __ j(above_equal, &string_add_runtime);

    // First argument is a string, test second.
    __ JumpIfSmi(edx, &string_add_runtime);
    __ CmpObjectType(edx, FIRST_NONSTRING_TYPE, ebx);
    __ j(above_equal, &string_add_runtime);
  } else {
    // Here at least one of the arguments is definitely a string.
    // We convert the one that is not known to be a string.
    if ((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) == 0) {
      ASSERT((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) != 0);
      GenerateConvertArgument(masm, 2 * kPointerSize, eax, ebx, ecx, edi,
                              &call_builtin);
      builtin_id = Builtins::STRING_ADD_RIGHT;
    } else if ((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) == 0) {
      ASSERT((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) != 0);
      GenerateConvertArgument(masm, 1 * kPointerSize, edx, ebx, ecx, edi,
                              &call_builtin);
      builtin_id = Builtins::STRING_ADD_LEFT;
    }
  }

  // Both arguments are strings.
  // eax: first string
  // edx: second string
  // Check if either of the strings are empty. In that case return the other.
  Label second_not_zero_length, both_not_zero_length;
  __ mov(ecx, FieldOperand(edx, String::kLengthOffset));
  STATIC_ASSERT(kSmiTag == 0);
  // Length is a smi; smi zero == machine zero, so a plain test works.
  __ test(ecx, Operand(ecx));
  __ j(not_zero, &second_not_zero_length, Label::kNear);
  // Second string is empty, result is first string which is already in eax.
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);
  __ bind(&second_not_zero_length);
  __ mov(ebx, FieldOperand(eax, String::kLengthOffset));
  STATIC_ASSERT(kSmiTag == 0);
  __ test(ebx, Operand(ebx));
  __ j(not_zero, &both_not_zero_length, Label::kNear);
  // First string is empty, result is second string which is in edx.
  __ mov(eax, edx);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  // Both strings are non-empty.
  // eax: first string
  // ebx: length of first string as a smi
  // ecx: length of second string as a smi
  // edx: second string
  // Look at the length of the result of adding the two strings.
  Label string_add_flat_result, longer_than_two;
  __ bind(&both_not_zero_length);
  __ add(ebx, Operand(ecx));
  STATIC_ASSERT(Smi::kMaxValue == String::kMaxLength);
  // Handle exceptionally long strings in the runtime system.
  // Smi addition overflow implies the combined length exceeds kMaxLength.
  __ j(overflow, &string_add_runtime);
  // Use the symbol table when adding two one character strings, as it
  // helps later optimizations to return a symbol here.
  __ cmp(Operand(ebx), Immediate(Smi::FromInt(2)));
  __ j(not_equal, &longer_than_two);

  // Check that both strings are non-external ascii strings.
  __ JumpIfNotBothSequentialAsciiStrings(eax, edx, ebx, ecx,
                                         &string_add_runtime);

  // Get the two characters forming the new string.
  __ movzx_b(ebx, FieldOperand(eax, SeqAsciiString::kHeaderSize));
  __ movzx_b(ecx, FieldOperand(edx, SeqAsciiString::kHeaderSize));

  // Try to lookup two character string in symbol table. If it is not found
  // just allocate a new one.
  Label make_two_character_string, make_two_character_string_no_reload;
  StringHelper::GenerateTwoCharacterSymbolTableProbe(
      masm, ebx, ecx, eax, edx, edi,
      &make_two_character_string_no_reload, &make_two_character_string);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  // Allocate a two character string.
  __ bind(&make_two_character_string);
  // Reload the arguments, since the probe clobbered eax/edx.
  __ mov(eax, Operand(esp, 2 * kPointerSize));  // First argument.
  __ mov(edx, Operand(esp, 1 * kPointerSize));  // Second argument.
  // Get the two characters forming the new string.
  __ movzx_b(ebx, FieldOperand(eax, SeqAsciiString::kHeaderSize));
  __ movzx_b(ecx, FieldOperand(edx, SeqAsciiString::kHeaderSize));
  __ bind(&make_two_character_string_no_reload);
  __ IncrementCounter(counters->string_add_make_two_char(), 1);
  __ AllocateAsciiString(eax,  // Result.
                         2,  // Length.
                         edi,  // Scratch 1.
                         edx,  // Scratch 2.
                         &string_add_runtime);
  // Pack both characters in ebx: char 1 in byte 0, char 2 in byte 1.
  __ shl(ecx, kBitsPerByte);
  __ or_(ebx, Operand(ecx));
  // Set the characters in the new string with a single 16-bit store.
  __ mov_w(FieldOperand(eax, SeqAsciiString::kHeaderSize), ebx);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  __ bind(&longer_than_two);
  // Check if resulting string will be flat.
  __ cmp(Operand(ebx), Immediate(Smi::FromInt(String::kMinNonFlatLength)));
  __ j(below, &string_add_flat_result);

  // If result is not supposed to be flat allocate a cons string object. If both
  // strings are ascii the result is an ascii cons string.
  Label non_ascii, allocated, ascii_data;
  __ mov(edi, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(edi, Map::kInstanceTypeOffset));
  __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
  __ movzx_b(edi, FieldOperand(edi, Map::kInstanceTypeOffset));
  // AND of the instance types: ascii bit survives only if both are ascii.
  __ and_(ecx, Operand(edi));
  STATIC_ASSERT(kStringEncodingMask == kAsciiStringTag);
  __ test(ecx, Immediate(kAsciiStringTag));
  __ j(zero, &non_ascii);
  __ bind(&ascii_data);
  // Allocate an ascii cons string.
  __ AllocateAsciiConsString(ecx, edi, no_reg, &string_add_runtime);
  __ bind(&allocated);
  // Fill the fields of the cons string.
  if (FLAG_debug_code) __ AbortIfNotSmi(ebx);
  __ mov(FieldOperand(ecx, ConsString::kLengthOffset), ebx);
  __ mov(FieldOperand(ecx, ConsString::kHashFieldOffset),
         Immediate(String::kEmptyHashField));
  __ mov(FieldOperand(ecx, ConsString::kFirstOffset), eax);
  __ mov(FieldOperand(ecx, ConsString::kSecondOffset), edx);
  __ mov(eax, ecx);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);
  __ bind(&non_ascii);
  // At least one of the strings is two-byte. Check whether it happens
  // to contain only ascii characters.
  // ecx: first instance type AND second instance type.
  // edi: second instance type.
  __ test(ecx, Immediate(kAsciiDataHintMask));
  __ j(not_zero, &ascii_data);
  // Also treat as ascii when one operand is ascii and the other carries the
  // ascii-data hint (bits checked via XOR of the two instance types).
  __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
  __ xor_(edi, Operand(ecx));
  STATIC_ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0);
  __ and_(edi, kAsciiStringTag | kAsciiDataHintTag);
  __ cmp(edi, kAsciiStringTag | kAsciiDataHintTag);
  __ j(equal, &ascii_data);
  // Allocate a two byte cons string.
  __ AllocateConsString(ecx, edi, no_reg, &string_add_runtime);
  __ jmp(&allocated);

  // Handle creating a flat result. First check that both strings are not
  // external strings.
  // eax: first string
  // ebx: length of resulting flat string as a smi
  // edx: second string
  __ bind(&string_add_flat_result);
  __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
  __ and_(ecx, kStringRepresentationMask);
  __ cmp(ecx, kExternalStringTag);
  __ j(equal, &string_add_runtime);
  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
  __ and_(ecx, kStringRepresentationMask);
  __ cmp(ecx, kExternalStringTag);
  __ j(equal, &string_add_runtime);
  // We cannot encounter sliced strings here since:
  STATIC_ASSERT(SlicedString::kMinLength >= String::kMinNonFlatLength);
  // Now check if both strings are ascii strings.
  // eax: first string
  // ebx: length of resulting flat string as a smi
  // edx: second string
  Label non_ascii_string_add_flat_result;
  STATIC_ASSERT(kStringEncodingMask == kAsciiStringTag);
  __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kAsciiStringTag);
  __ j(zero, &non_ascii_string_add_flat_result);
  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kAsciiStringTag);
  __ j(zero, &string_add_runtime);

  // Both strings are ascii strings. As they are short they are both flat.
  // ebx: length of resulting flat string as a smi
  __ SmiUntag(ebx);
  __ AllocateAsciiString(eax, ebx, ecx, edx, edi, &string_add_runtime);
  // eax: result string
  __ mov(ecx, eax);
  // Locate first character of result.
  __ add(Operand(ecx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // Load first argument and locate first character.
  __ mov(edx, Operand(esp, 2 * kPointerSize));
  __ mov(edi, FieldOperand(edx, String::kLengthOffset));
  __ SmiUntag(edi);
  __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // eax: result string
  // ecx: first character of result
  // edx: first char of first argument
  // edi: length of first argument
  StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
  // Load second argument and locate first character.
  __ mov(edx, Operand(esp, 1 * kPointerSize));
  __ mov(edi, FieldOperand(edx, String::kLengthOffset));
  __ SmiUntag(edi);
  __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // eax: result string
  // ecx: next character of result
  // edx: first char of second argument
  // edi: length of second argument
  StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  // Handle creating a flat two byte result.
  // eax: first string - known to be two byte
  // ebx: length of resulting flat string as a smi
  // edx: second string
  __ bind(&non_ascii_string_add_flat_result);
  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kAsciiStringTag);
  __ j(not_zero, &string_add_runtime);
  // Both strings are two byte strings. As they are short they are both
  // flat.
  __ SmiUntag(ebx);
  __ AllocateTwoByteString(eax, ebx, ecx, edx, edi, &string_add_runtime);
  // eax: result string
  __ mov(ecx, eax);
  // Locate first character of result.
  __ add(Operand(ecx),
         Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // Load first argument and locate first character.
  __ mov(edx, Operand(esp, 2 * kPointerSize));
  __ mov(edi, FieldOperand(edx, String::kLengthOffset));
  __ SmiUntag(edi);
  __ add(Operand(edx),
         Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // eax: result string
  // ecx: first character of result
  // edx: first char of first argument
  // edi: length of first argument
  StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
  // Load second argument and locate first character.
  __ mov(edx, Operand(esp, 1 * kPointerSize));
  __ mov(edi, FieldOperand(edx, String::kLengthOffset));
  __ SmiUntag(edi);
  // NOTE(review): this is the two-byte path but uses
  // SeqAsciiString::kHeaderSize; presumably it equals
  // SeqTwoByteString::kHeaderSize (the first-argument path above uses the
  // two-byte constant) -- confirm, and prefer the two-byte constant for
  // clarity.
  __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // eax: result string
  // ecx: next character of result
  // edx: first char of second argument
  // edi: length of second argument
  StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  // Just jump to runtime to add the two strings.
  __ bind(&string_add_runtime);
  __ TailCallRuntime(Runtime::kStringAdd, 2, 1);

  // Bound only when GenerateConvertArgument was emitted and may bail out;
  // builtin_id tells the builtin which operand is already a string.
  if (call_builtin.is_linked()) {
    __ bind(&call_builtin);
    __ InvokeBuiltin(builtin_id, JUMP_FUNCTION);
  }
}
5328
5329
// Converts the argument in register |arg| (also living on the stack at
// esp[stack_offset]) into a string, in place: strings pass through
// untouched, numbers are looked up in the number-to-string cache, and
// "safe" String wrappers (new String(...) objects whose valueOf has not
// been tampered with) are unwrapped. The converted value is written back to
// both |arg| and the stack slot. Jumps to |slow| if the value cannot be
// converted here (uncached number, unsafe wrapper, or any other object).
void StringAddStub::GenerateConvertArgument(MacroAssembler* masm,
                                            int stack_offset,
                                            Register arg,
                                            Register scratch1,
                                            Register scratch2,
                                            Register scratch3,
                                            Label* slow) {
  // First check if the argument is already a string.
  Label not_string, done;
  __ JumpIfSmi(arg, &not_string);
  __ CmpObjectType(arg, FIRST_NONSTRING_TYPE, scratch1);
  __ j(below, &done);

  // Check the number to string cache.
  Label not_cached;
  __ bind(&not_string);
  // Puts the cached result into scratch1.
  NumberToStringStub::GenerateLookupNumberStringCache(masm,
                                                      arg,
                                                      scratch1,
                                                      scratch2,
                                                      scratch3,
                                                      false,
                                                      &not_cached);
  __ mov(arg, scratch1);
  __ mov(Operand(esp, stack_offset), arg);
  __ jmp(&done);

  // Check if the argument is a safe string wrapper.
  __ bind(&not_cached);
  __ JumpIfSmi(arg, slow);
  __ CmpObjectType(arg, JS_VALUE_TYPE, scratch1);  // map -> scratch1.
  __ j(not_equal, slow);
  // Only unwrap if the map is still marked safe for the default valueOf.
  __ test_b(FieldOperand(scratch1, Map::kBitField2Offset),
            1 << Map::kStringWrapperSafeForDefaultValueOf);
  __ j(zero, slow);
  __ mov(arg, FieldOperand(arg, JSValue::kValueOffset));
  __ mov(Operand(esp, stack_offset), arg);

  __ bind(&done);
}
5371
5372
// Copies |count| characters from |src| to |dest|, one character per
// iteration (1 byte if |ascii|, 2 bytes otherwise). |count| must be
// non-zero: the loop condition is tested only after the first copy, so a
// zero count would wrap. All of |dest|, |src| and |count| are clobbered;
// |scratch| is the transfer register.
void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
                                          Register dest,
                                          Register src,
                                          Register count,
                                          Register scratch,
                                          bool ascii) {
  Label loop;
  __ bind(&loop);
  // This loop just copies one character at a time, as it is only used for very
  // short strings.
  if (ascii) {
    __ mov_b(scratch, Operand(src, 0));
    __ mov_b(Operand(dest, 0), scratch);
    __ add(Operand(src), Immediate(1));
    __ add(Operand(dest), Immediate(1));
  } else {
    __ mov_w(scratch, Operand(src, 0));
    __ mov_w(Operand(dest, 0), scratch);
    __ add(Operand(src), Immediate(2));
    __ add(Operand(dest), Immediate(2));
  }
  __ sub(Operand(count), Immediate(1));
  __ j(not_zero, &loop);
}
5397
5398
// Copies |count| characters from |src| to |dest| using the x86 "rep movs"
// string instruction for the bulk (doubleword-at-a-time) and a byte loop
// for the 0-3 byte remainder. Handles count == 0. Register constraints are
// imposed by rep movs itself: dest must be edi, src must be esi, count must
// be ecx; all three plus |scratch| are clobbered.
void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
                                             Register dest,
                                             Register src,
                                             Register count,
                                             Register scratch,
                                             bool ascii) {
  // Copy characters using rep movs of doublewords.
  // The destination is aligned on a 4 byte boundary because we are
  // copying to the beginning of a newly allocated string.
  ASSERT(dest.is(edi));  // rep movs destination
  ASSERT(src.is(esi));  // rep movs source
  ASSERT(count.is(ecx));  // rep movs count
  ASSERT(!scratch.is(dest));
  ASSERT(!scratch.is(src));
  ASSERT(!scratch.is(count));

  // Nothing to do for zero characters.
  Label done;
  __ test(count, Operand(count));
  __ j(zero, &done);

  // Make count the number of bytes to copy (two-byte chars double it).
  if (!ascii) {
    __ shl(count, 1);
  }

  // Don't enter the rep movs if there are less than 4 bytes to copy.
  Label last_bytes;
  __ test(count, Immediate(~3));
  __ j(zero, &last_bytes, Label::kNear);

  // Copy from edi to esi using rep movs instruction.
  __ mov(scratch, count);  // Save byte count for the remainder computation.
  __ sar(count, 2);  // Number of doublewords to copy.
  __ cld();  // Forward direction for the string copy.
  __ rep_movs();

  // Find number of bytes left.
  __ mov(count, scratch);
  __ and_(count, 3);

  // Check if there are more bytes to copy.
  __ bind(&last_bytes);
  __ test(count, Operand(count));
  __ j(zero, &done);

  // Copy remaining characters byte by byte.
  Label loop;
  __ bind(&loop);
  __ mov_b(scratch, Operand(src, 0));
  __ mov_b(Operand(dest, 0), scratch);
  __ add(Operand(src), Immediate(1));
  __ add(Operand(dest), Immediate(1));
  __ sub(Operand(count), Immediate(1));
  __ j(not_zero, &loop);

  __ bind(&done);
}
5457
5458
// Probes the symbol table for the two-character string made of char codes
// |c1| and |c2|. On success the symbol is left in eax and control falls
// through to the caller's code after this emission. Jumps to |not_probed|
// when both characters are ascii digits (digit strings hash differently as
// array indices, so the table is not probed at all), and to |not_found|
// when the probe ran but found no matching symbol. Clobbers c1, c2 and all
// three scratch registers.
void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
                                                        Register c1,
                                                        Register c2,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3,
                                                        Label* not_probed,
                                                        Label* not_found) {
  // Register scratch3 is the general scratch register in this function.
  Register scratch = scratch3;

  // Make sure that both characters are not digits as such strings has a
  // different hash algorithm. Don't try to look for these in the symbol table.
  Label not_array_index;
  __ mov(scratch, c1);
  __ sub(Operand(scratch), Immediate(static_cast<int>('0')));
  __ cmp(Operand(scratch), Immediate(static_cast<int>('9' - '0')));
  __ j(above, &not_array_index, Label::kNear);
  __ mov(scratch, c2);
  __ sub(Operand(scratch), Immediate(static_cast<int>('0')));
  __ cmp(Operand(scratch), Immediate(static_cast<int>('9' - '0')));
  __ j(below_equal, not_probed);

  __ bind(&not_array_index);
  // Calculate the two character string hash.
  Register hash = scratch1;
  GenerateHashInit(masm, hash, c1, scratch);
  GenerateHashAddCharacter(masm, hash, c2, scratch);
  GenerateHashGetHash(masm, hash, scratch);

  // Collect the two characters in a register.
  Register chars = c1;
  __ shl(c2, kBitsPerByte);
  __ or_(chars, Operand(c2));

  // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
  // hash: hash of two character string.

  // Load the symbol table.
  Register symbol_table = c2;
  ExternalReference roots_address =
      ExternalReference::roots_address(masm->isolate());
  __ mov(scratch, Immediate(Heap::kSymbolTableRootIndex));
  __ mov(symbol_table,
         Operand::StaticArray(scratch, times_pointer_size, roots_address));

  // Calculate capacity mask from the symbol table capacity.
  Register mask = scratch2;
  __ mov(mask, FieldOperand(symbol_table, SymbolTable::kCapacityOffset));
  __ SmiUntag(mask);
  __ sub(Operand(mask), Immediate(1));  // Capacity is a power of two.

  // Registers
  // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
  // hash: hash of two character string
  // symbol_table: symbol table
  // mask: capacity mask
  // scratch: -

  // Perform a number of probes in the symbol table.
  static const int kProbes = 4;
  Label found_in_symbol_table;
  Label next_probe[kProbes], next_probe_pop_mask[kProbes];
  for (int i = 0; i < kProbes; i++) {
    // Calculate entry in symbol table.
    __ mov(scratch, hash);
    if (i > 0) {
      __ add(Operand(scratch), Immediate(SymbolTable::GetProbeOffset(i)));
    }
    __ and_(scratch, Operand(mask));

    // Load the entry from the symbol table.
    Register candidate = scratch;  // Scratch register contains candidate.
    STATIC_ASSERT(SymbolTable::kEntrySize == 1);
    __ mov(candidate,
           FieldOperand(symbol_table,
                        scratch,
                        times_pointer_size,
                        SymbolTable::kElementsStartOffset));

    // If entry is undefined no string with this hash can be found.
    Factory* factory = masm->isolate()->factory();
    __ cmp(candidate, factory->undefined_value());
    __ j(equal, not_found);
    // Null marks a deleted entry: skip it but keep probing.
    __ cmp(candidate, factory->null_value());
    __ j(equal, &next_probe[i]);

    // If length is not 2 the string is not a candidate.
    __ cmp(FieldOperand(candidate, String::kLengthOffset),
           Immediate(Smi::FromInt(2)));
    __ j(not_equal, &next_probe[i]);

    // As we are out of registers save the mask on the stack and use that
    // register as a temporary.
    __ push(mask);
    Register temp = mask;

    // Check that the candidate is a non-external ascii string.
    __ mov(temp, FieldOperand(candidate, HeapObject::kMapOffset));
    __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
    __ JumpIfInstanceTypeIsNotSequentialAscii(
        temp, temp, &next_probe_pop_mask[i]);

    // Check if the two characters match.
    __ mov(temp, FieldOperand(candidate, SeqAsciiString::kHeaderSize));
    __ and_(temp, 0x0000ffff);  // Only the first two bytes are relevant.
    __ cmp(chars, Operand(temp));
    __ j(equal, &found_in_symbol_table);
    __ bind(&next_probe_pop_mask[i]);
    __ pop(mask);
    __ bind(&next_probe[i]);
  }

  // No matching 2 character string found by probing.
  __ jmp(not_found);

  // Scratch register contains result when we fall through to here.
  Register result = scratch;
  __ bind(&found_in_symbol_table);
  __ pop(mask);  // Pop saved mask from the stack.
  if (!result.is(eax)) {
    __ mov(eax, result);
  }
}
5583
5584
// Initializes the running string-hash accumulator in |hash| from the first
// character code in |character|. Must agree bit-for-bit with the runtime's
// string hash so symbol table probes find existing entries. |scratch| is
// clobbered; |character| is preserved.
void StringHelper::GenerateHashInit(MacroAssembler* masm,
                                    Register hash,
                                    Register character,
                                    Register scratch) {
  // hash = character + (character << 10);
  __ mov(hash, character);
  __ shl(hash, 10);
  __ add(hash, Operand(character));
  // hash ^= hash >> 6;
  __ mov(scratch, hash);
  __ sar(scratch, 6);
  __ xor_(hash, Operand(scratch));
}
5598
5599
// Folds one more character code from |character| into the running string
// hash in |hash| (same mixing steps as GenerateHashInit applied to the
// updated sum). |scratch| is clobbered; |character| is preserved.
void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
                                            Register hash,
                                            Register character,
                                            Register scratch) {
  // hash += character;
  __ add(hash, Operand(character));
  // hash += hash << 10;
  __ mov(scratch, hash);
  __ shl(scratch, 10);
  __ add(hash, Operand(scratch));
  // hash ^= hash >> 6;
  __ mov(scratch, hash);
  __ sar(scratch, 6);
  __ xor_(hash, Operand(scratch));
}
5615
5616
// Finalizes the running string hash in |hash| with the avalanche steps and
// substitutes 27 for a zero result, so the final hash is never zero (zero
// presumably marks "hash not computed" elsewhere -- TODO confirm against
// String::kEmptyHashField handling). |scratch| is clobbered.
void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
                                       Register hash,
                                       Register scratch) {
  // hash += hash << 3;
  __ mov(scratch, hash);
  __ shl(scratch, 3);
  __ add(hash, Operand(scratch));
  // hash ^= hash >> 11;
  __ mov(scratch, hash);
  __ sar(scratch, 11);
  __ xor_(hash, Operand(scratch));
  // hash += hash << 15;
  __ mov(scratch, hash);
  __ shl(scratch, 15);
  __ add(hash, Operand(scratch));

  // if (hash == 0) hash = 27;
  Label hash_not_zero;
  __ test(hash, Operand(hash));
  __ j(not_zero, &hash_not_zero, Label::kNear);
  __ mov(hash, Immediate(27));
  __ bind(&hash_not_zero);
}
5640
5641
5642void SubStringStub::Generate(MacroAssembler* masm) {
5643 Label runtime;
5644
5645 // Stack frame on entry.
5646 // esp[0]: return address
5647 // esp[4]: to
5648 // esp[8]: from
5649 // esp[12]: string
5650
5651 // Make sure first argument is a string.
5652 __ mov(eax, Operand(esp, 3 * kPointerSize));
5653 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00005654 __ JumpIfSmi(eax, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005655 Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
5656 __ j(NegateCondition(is_string), &runtime);
5657
5658 // eax: string
5659 // ebx: instance type
5660
5661 // Calculate length of sub string using the smi values.
5662 Label result_longer_than_two;
5663 __ mov(ecx, Operand(esp, 1 * kPointerSize)); // To index.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00005664 __ JumpIfNotSmi(ecx, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005665 __ mov(edx, Operand(esp, 2 * kPointerSize)); // From index.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00005666 __ JumpIfNotSmi(edx, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005667 __ sub(ecx, Operand(edx));
5668 __ cmp(ecx, FieldOperand(eax, String::kLengthOffset));
5669 Label return_eax;
5670 __ j(equal, &return_eax);
5671 // Special handling of sub-strings of length 1 and 2. One character strings
5672 // are handled in the runtime system (looked up in the single character
5673 // cache). Two character strings are looked for in the symbol cache.
5674 __ SmiUntag(ecx); // Result length is no longer smi.
5675 __ cmp(ecx, 2);
5676 __ j(greater, &result_longer_than_two);
5677 __ j(less, &runtime);
5678
5679 // Sub string of length 2 requested.
5680 // eax: string
5681 // ebx: instance type
5682 // ecx: sub string length (value is 2)
5683 // edx: from index (smi)
5684 __ JumpIfInstanceTypeIsNotSequentialAscii(ebx, ebx, &runtime);
5685
5686 // Get the two characters forming the sub string.
5687 __ SmiUntag(edx); // From index is no longer smi.
5688 __ movzx_b(ebx, FieldOperand(eax, edx, times_1, SeqAsciiString::kHeaderSize));
5689 __ movzx_b(ecx,
5690 FieldOperand(eax, edx, times_1, SeqAsciiString::kHeaderSize + 1));
5691
5692 // Try to lookup two character string in symbol table.
5693 Label make_two_character_string;
5694 StringHelper::GenerateTwoCharacterSymbolTableProbe(
Iain Merrick9ac36c92010-09-13 15:29:50 +01005695 masm, ebx, ecx, eax, edx, edi,
5696 &make_two_character_string, &make_two_character_string);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005697 __ ret(3 * kPointerSize);
5698
5699 __ bind(&make_two_character_string);
5700 // Setup registers for allocating the two character string.
5701 __ mov(eax, Operand(esp, 3 * kPointerSize));
5702 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
5703 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
5704 __ Set(ecx, Immediate(2));
5705
Ben Murdoch69a99ed2011-11-30 16:03:39 +00005706 if (FLAG_string_slices) {
5707 Label copy_routine;
5708 // If coming from the make_two_character_string path, the string
5709 // is too short to be sliced anyways.
5710 STATIC_ASSERT(2 < SlicedString::kMinLength);
5711 __ jmp(&copy_routine);
5712 __ bind(&result_longer_than_two);
5713
5714 // eax: string
5715 // ebx: instance type
5716 // ecx: sub string length
5717 // edx: from index (smi)
5718 Label allocate_slice, sliced_string, seq_string;
5719 __ cmp(ecx, SlicedString::kMinLength);
5720 // Short slice. Copy instead of slicing.
5721 __ j(less, &copy_routine);
5722 STATIC_ASSERT(kSeqStringTag == 0);
5723 __ test(ebx, Immediate(kStringRepresentationMask));
5724 __ j(zero, &seq_string, Label::kNear);
5725 STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
5726 STATIC_ASSERT(kIsIndirectStringMask != 0);
5727 __ test(ebx, Immediate(kIsIndirectStringMask));
5728 // External string. Jump to runtime.
5729 __ j(zero, &runtime);
5730
5731 Factory* factory = masm->isolate()->factory();
5732 __ test(ebx, Immediate(kSlicedNotConsMask));
5733 __ j(not_zero, &sliced_string, Label::kNear);
5734 // Cons string. Check whether it is flat, then fetch first part.
5735 __ cmp(FieldOperand(eax, ConsString::kSecondOffset),
5736 factory->empty_string());
5737 __ j(not_equal, &runtime);
5738 __ mov(edi, FieldOperand(eax, ConsString::kFirstOffset));
5739 __ jmp(&allocate_slice, Label::kNear);
5740
5741 __ bind(&sliced_string);
5742 // Sliced string. Fetch parent and correct start index by offset.
5743 __ add(edx, FieldOperand(eax, SlicedString::kOffsetOffset));
5744 __ mov(edi, FieldOperand(eax, SlicedString::kParentOffset));
5745 __ jmp(&allocate_slice, Label::kNear);
5746
5747 __ bind(&seq_string);
5748 // Sequential string. Just move string to the right register.
5749 __ mov(edi, eax);
5750
5751 __ bind(&allocate_slice);
5752 // edi: underlying subject string
5753 // ebx: instance type of original subject string
5754 // edx: offset
5755 // ecx: length
5756 // Allocate new sliced string. At this point we do not reload the instance
5757 // type including the string encoding because we simply rely on the info
5758 // provided by the original string. It does not matter if the original
5759 // string's encoding is wrong because we always have to recheck encoding of
5760 // the newly created string's parent anyways due to externalized strings.
5761 Label two_byte_slice, set_slice_header;
5762 STATIC_ASSERT(kAsciiStringTag != 0);
5763 __ test(ebx, Immediate(kAsciiStringTag));
5764 __ j(zero, &two_byte_slice, Label::kNear);
5765 __ AllocateAsciiSlicedString(eax, ebx, no_reg, &runtime);
5766 __ jmp(&set_slice_header, Label::kNear);
5767 __ bind(&two_byte_slice);
5768 __ AllocateSlicedString(eax, ebx, no_reg, &runtime);
5769 __ bind(&set_slice_header);
5770 __ mov(FieldOperand(eax, SlicedString::kOffsetOffset), edx);
5771 __ SmiTag(ecx);
5772 __ mov(FieldOperand(eax, SlicedString::kLengthOffset), ecx);
5773 __ mov(FieldOperand(eax, SlicedString::kParentOffset), edi);
5774 __ mov(FieldOperand(eax, SlicedString::kHashFieldOffset),
5775 Immediate(String::kEmptyHashField));
5776 __ jmp(&return_eax);
5777
5778 __ bind(&copy_routine);
5779 } else {
5780 __ bind(&result_longer_than_two);
5781 }
5782
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005783 // eax: string
5784 // ebx: instance type
5785 // ecx: result string length
5786 // Check for flat ascii string
5787 Label non_ascii_flat;
5788 __ JumpIfInstanceTypeIsNotSequentialAscii(ebx, ebx, &non_ascii_flat);
5789
5790 // Allocate the result.
5791 __ AllocateAsciiString(eax, ecx, ebx, edx, edi, &runtime);
5792
5793 // eax: result string
5794 // ecx: result string length
5795 __ mov(edx, esi); // esi used by following code.
5796 // Locate first character of result.
5797 __ mov(edi, eax);
5798 __ add(Operand(edi), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
5799 // Load string argument and locate character of sub string start.
5800 __ mov(esi, Operand(esp, 3 * kPointerSize));
5801 __ add(Operand(esi), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
5802 __ mov(ebx, Operand(esp, 2 * kPointerSize)); // from
5803 __ SmiUntag(ebx);
5804 __ add(esi, Operand(ebx));
5805
5806 // eax: result string
5807 // ecx: result length
5808 // edx: original value of esi
5809 // edi: first character of result
5810 // esi: character of sub string start
5811 StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, true);
5812 __ mov(esi, edx); // Restore esi.
Steve Block44f0eee2011-05-26 01:26:41 +01005813 Counters* counters = masm->isolate()->counters();
5814 __ IncrementCounter(counters->sub_string_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005815 __ ret(3 * kPointerSize);
5816
5817 __ bind(&non_ascii_flat);
5818 // eax: string
5819 // ebx: instance type & kStringRepresentationMask | kStringEncodingMask
5820 // ecx: result string length
5821 // Check for flat two byte string
5822 __ cmp(ebx, kSeqStringTag | kTwoByteStringTag);
5823 __ j(not_equal, &runtime);
5824
5825 // Allocate the result.
5826 __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime);
5827
5828 // eax: result string
5829 // ecx: result string length
5830 __ mov(edx, esi); // esi used by following code.
5831 // Locate first character of result.
5832 __ mov(edi, eax);
5833 __ add(Operand(edi),
5834 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
5835 // Load string argument and locate character of sub string start.
5836 __ mov(esi, Operand(esp, 3 * kPointerSize));
5837 __ add(Operand(esi),
5838 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
5839 __ mov(ebx, Operand(esp, 2 * kPointerSize)); // from
5840 // As from is a smi it is 2 times the value which matches the size of a two
5841 // byte character.
5842 STATIC_ASSERT(kSmiTag == 0);
5843 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
5844 __ add(esi, Operand(ebx));
5845
5846 // eax: result string
5847 // ecx: result length
5848 // edx: original value of esi
5849 // edi: first character of result
5850 // esi: character of sub string start
5851 StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, false);
5852 __ mov(esi, edx); // Restore esi.
5853
5854 __ bind(&return_eax);
Steve Block44f0eee2011-05-26 01:26:41 +01005855 __ IncrementCounter(counters->sub_string_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005856 __ ret(3 * kPointerSize);
5857
5858 // Just jump to runtime to create the sub string.
5859 __ bind(&runtime);
5860 __ TailCallRuntime(Runtime::kSubString, 3, 1);
5861}
5862
5863
Ben Murdoch257744e2011-11-30 15:57:28 +00005864void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm,
5865 Register left,
5866 Register right,
5867 Register scratch1,
5868 Register scratch2) {
5869 Register length = scratch1;
5870
5871 // Compare lengths.
5872 Label strings_not_equal, check_zero_length;
5873 __ mov(length, FieldOperand(left, String::kLengthOffset));
5874 __ cmp(length, FieldOperand(right, String::kLengthOffset));
5875 __ j(equal, &check_zero_length, Label::kNear);
5876 __ bind(&strings_not_equal);
5877 __ Set(eax, Immediate(Smi::FromInt(NOT_EQUAL)));
5878 __ ret(0);
5879
5880 // Check if the length is zero.
5881 Label compare_chars;
5882 __ bind(&check_zero_length);
5883 STATIC_ASSERT(kSmiTag == 0);
5884 __ test(length, Operand(length));
5885 __ j(not_zero, &compare_chars, Label::kNear);
5886 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
5887 __ ret(0);
5888
5889 // Compare characters.
5890 __ bind(&compare_chars);
5891 GenerateAsciiCharsCompareLoop(masm, left, right, length, scratch2,
5892 &strings_not_equal, Label::kNear);
5893
5894 // Characters are equal.
5895 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
5896 __ ret(0);
5897}
5898
5899
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005900void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
5901 Register left,
5902 Register right,
5903 Register scratch1,
5904 Register scratch2,
5905 Register scratch3) {
Steve Block44f0eee2011-05-26 01:26:41 +01005906 Counters* counters = masm->isolate()->counters();
5907 __ IncrementCounter(counters->string_compare_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005908
5909 // Find minimum length.
Ben Murdoch257744e2011-11-30 15:57:28 +00005910 Label left_shorter;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005911 __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
5912 __ mov(scratch3, scratch1);
5913 __ sub(scratch3, FieldOperand(right, String::kLengthOffset));
5914
5915 Register length_delta = scratch3;
5916
Ben Murdoch257744e2011-11-30 15:57:28 +00005917 __ j(less_equal, &left_shorter, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005918 // Right string is shorter. Change scratch1 to be length of right string.
5919 __ sub(scratch1, Operand(length_delta));
5920 __ bind(&left_shorter);
5921
5922 Register min_length = scratch1;
5923
5924 // If either length is zero, just compare lengths.
Ben Murdoch257744e2011-11-30 15:57:28 +00005925 Label compare_lengths;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005926 __ test(min_length, Operand(min_length));
Ben Murdoch257744e2011-11-30 15:57:28 +00005927 __ j(zero, &compare_lengths, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005928
Ben Murdoch257744e2011-11-30 15:57:28 +00005929 // Compare characters.
5930 Label result_not_equal;
5931 GenerateAsciiCharsCompareLoop(masm, left, right, min_length, scratch2,
5932 &result_not_equal, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005933
5934 // Compare lengths - strings up to min-length are equal.
5935 __ bind(&compare_lengths);
5936 __ test(length_delta, Operand(length_delta));
Ben Murdoch257744e2011-11-30 15:57:28 +00005937 __ j(not_zero, &result_not_equal, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005938
5939 // Result is EQUAL.
5940 STATIC_ASSERT(EQUAL == 0);
5941 STATIC_ASSERT(kSmiTag == 0);
5942 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
5943 __ ret(0);
5944
Ben Murdoch257744e2011-11-30 15:57:28 +00005945 Label result_greater;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005946 __ bind(&result_not_equal);
Ben Murdoch257744e2011-11-30 15:57:28 +00005947 __ j(greater, &result_greater, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005948
5949 // Result is LESS.
5950 __ Set(eax, Immediate(Smi::FromInt(LESS)));
5951 __ ret(0);
5952
5953 // Result is GREATER.
5954 __ bind(&result_greater);
5955 __ Set(eax, Immediate(Smi::FromInt(GREATER)));
5956 __ ret(0);
5957}
5958
5959
Ben Murdoch257744e2011-11-30 15:57:28 +00005960void StringCompareStub::GenerateAsciiCharsCompareLoop(
5961 MacroAssembler* masm,
5962 Register left,
5963 Register right,
5964 Register length,
5965 Register scratch,
5966 Label* chars_not_equal,
5967 Label::Distance chars_not_equal_near) {
5968 // Change index to run from -length to -1 by adding length to string
5969 // start. This means that loop ends when index reaches zero, which
5970 // doesn't need an additional compare.
5971 __ SmiUntag(length);
5972 __ lea(left,
5973 FieldOperand(left, length, times_1, SeqAsciiString::kHeaderSize));
5974 __ lea(right,
5975 FieldOperand(right, length, times_1, SeqAsciiString::kHeaderSize));
5976 __ neg(length);
5977 Register index = length; // index = -length;
5978
5979 // Compare loop.
5980 Label loop;
5981 __ bind(&loop);
5982 __ mov_b(scratch, Operand(left, index, times_1, 0));
5983 __ cmpb(scratch, Operand(right, index, times_1, 0));
5984 __ j(not_equal, chars_not_equal, chars_not_equal_near);
5985 __ add(Operand(index), Immediate(1));
5986 __ j(not_zero, &loop);
5987}
5988
5989
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005990void StringCompareStub::Generate(MacroAssembler* masm) {
5991 Label runtime;
5992
5993 // Stack frame on entry.
5994 // esp[0]: return address
5995 // esp[4]: right string
5996 // esp[8]: left string
5997
5998 __ mov(edx, Operand(esp, 2 * kPointerSize)); // left
5999 __ mov(eax, Operand(esp, 1 * kPointerSize)); // right
6000
Ben Murdoch257744e2011-11-30 15:57:28 +00006001 Label not_same;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01006002 __ cmp(edx, Operand(eax));
Ben Murdoch257744e2011-11-30 15:57:28 +00006003 __ j(not_equal, &not_same, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01006004 STATIC_ASSERT(EQUAL == 0);
6005 STATIC_ASSERT(kSmiTag == 0);
6006 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
Steve Block44f0eee2011-05-26 01:26:41 +01006007 __ IncrementCounter(masm->isolate()->counters()->string_compare_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01006008 __ ret(2 * kPointerSize);
6009
6010 __ bind(&not_same);
6011
6012 // Check that both objects are sequential ascii strings.
6013 __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx, &runtime);
6014
6015 // Compare flat ascii strings.
6016 // Drop arguments from the stack.
6017 __ pop(ecx);
6018 __ add(Operand(esp), Immediate(2 * kPointerSize));
6019 __ push(ecx);
6020 GenerateCompareFlatAsciiStrings(masm, edx, eax, ecx, ebx, edi);
6021
6022 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
6023 // tagged as a small integer.
6024 __ bind(&runtime);
6025 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
6026}
6027
Ben Murdochb0fe1622011-05-05 13:52:32 +01006028
Ben Murdochb0fe1622011-05-05 13:52:32 +01006029void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
6030 ASSERT(state_ == CompareIC::SMIS);
Ben Murdoch257744e2011-11-30 15:57:28 +00006031 Label miss;
Ben Murdochb0fe1622011-05-05 13:52:32 +01006032 __ mov(ecx, Operand(edx));
6033 __ or_(ecx, Operand(eax));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00006034 __ JumpIfNotSmi(ecx, &miss, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01006035
6036 if (GetCondition() == equal) {
6037 // For equality we do not care about the sign of the result.
6038 __ sub(eax, Operand(edx));
6039 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00006040 Label done;
Ben Murdochb0fe1622011-05-05 13:52:32 +01006041 __ sub(edx, Operand(eax));
Ben Murdoch257744e2011-11-30 15:57:28 +00006042 __ j(no_overflow, &done, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01006043 // Correct sign of result in case of overflow.
6044 __ not_(edx);
6045 __ bind(&done);
6046 __ mov(eax, edx);
6047 }
6048 __ ret(0);
6049
6050 __ bind(&miss);
6051 GenerateMiss(masm);
6052}
6053
6054
// CompareIC stub specialized for two heap-number operands (edx = left,
// eax = right).  With SSE2+CMOV available it compares the doubles inline
// and returns -1/0/1 (as smis) in eax; NaN operands and unsupported CPUs
// fall back to the generic CompareStub, wrong operand types miss.
void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::HEAP_NUMBERS);

  Label generic_stub;
  Label unordered;
  Label miss;
  // AND the operands: the tag bit survives only if both are heap objects,
  // so a smi result means at least one operand is a smi -> generic stub.
  __ mov(ecx, Operand(edx));
  __ and_(ecx, Operand(eax));
  __ JumpIfSmi(ecx, &generic_stub, Label::kNear);

  __ CmpObjectType(eax, HEAP_NUMBER_TYPE, ecx);
  __ j(not_equal, &miss, Label::kNear);
  __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
  __ j(not_equal, &miss, Label::kNear);

  // Inlining the double comparison and falling back to the general compare
  // stub if NaN is involved or SSE2 or CMOV is unsupported.
  if (CpuFeatures::IsSupported(SSE2) && CpuFeatures::IsSupported(CMOV)) {
    CpuFeatures::Scope scope1(SSE2);
    CpuFeatures::Scope scope2(CMOV);

    // Load left and right operand
    __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
    __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));

    // Compare operands
    __ ucomisd(xmm0, xmm1);

    // Don't base result on EFLAGS when a NaN is involved.
    __ j(parity_even, &unordered, Label::kNear);

    // Return a result of -1, 0, or 1, based on EFLAGS.
    // Performing mov, because xor would destroy the flag register.
    __ mov(eax, 0);  // equal
    __ mov(ecx, Immediate(Smi::FromInt(1)));
    __ cmov(above, eax, Operand(ecx));
    __ mov(ecx, Immediate(Smi::FromInt(-1)));
    __ cmov(below, eax, Operand(ecx));
    __ ret(0);

    __ bind(&unordered);
  }

  CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
  __ bind(&generic_stub);
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);

  __ bind(&miss);
  GenerateMiss(masm);
}
6105
6106
Ben Murdoch257744e2011-11-30 15:57:28 +00006107void ICCompareStub::GenerateSymbols(MacroAssembler* masm) {
6108 ASSERT(state_ == CompareIC::SYMBOLS);
6109 ASSERT(GetCondition() == equal);
6110
6111 // Registers containing left and right operands respectively.
6112 Register left = edx;
6113 Register right = eax;
6114 Register tmp1 = ecx;
6115 Register tmp2 = ebx;
6116
6117 // Check that both operands are heap objects.
6118 Label miss;
6119 __ mov(tmp1, Operand(left));
6120 STATIC_ASSERT(kSmiTag == 0);
6121 __ and_(tmp1, Operand(right));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00006122 __ JumpIfSmi(tmp1, &miss, Label::kNear);
Ben Murdoch257744e2011-11-30 15:57:28 +00006123
6124 // Check that both operands are symbols.
6125 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
6126 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
6127 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
6128 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
6129 STATIC_ASSERT(kSymbolTag != 0);
6130 __ and_(tmp1, Operand(tmp2));
6131 __ test(tmp1, Immediate(kIsSymbolMask));
6132 __ j(zero, &miss, Label::kNear);
6133
6134 // Symbols are compared by identity.
6135 Label done;
6136 __ cmp(left, Operand(right));
6137 // Make sure eax is non-zero. At this point input operands are
6138 // guaranteed to be non-zero.
6139 ASSERT(right.is(eax));
6140 __ j(not_equal, &done, Label::kNear);
6141 STATIC_ASSERT(EQUAL == 0);
6142 STATIC_ASSERT(kSmiTag == 0);
6143 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
6144 __ bind(&done);
6145 __ ret(0);
6146
6147 __ bind(&miss);
6148 GenerateMiss(masm);
6149}
6150
6151
// CompareIC stub specialized for two string operands (equality only).
// Fast paths: identical pointers, two distinct symbols (interned, so known
// unequal), and flat sequential ASCII pairs compared inline; all other
// string shapes fall through to Runtime::kStringEquals.
void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::STRINGS);
  ASSERT(GetCondition() == equal);
  Label miss;

  // Registers containing left and right operands respectively.
  Register left = edx;
  Register right = eax;
  Register tmp1 = ecx;
  Register tmp2 = ebx;
  Register tmp3 = edi;

  // Check that both operands are heap objects.  ANDing preserves the heap
  // object tag only when both operands carry it.
  __ mov(tmp1, Operand(left));
  STATIC_ASSERT(kSmiTag == 0);
  __ and_(tmp1, Operand(right));
  __ JumpIfSmi(tmp1, &miss);

  // Check that both operands are strings. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  __ mov(tmp3, tmp1);
  STATIC_ASSERT(kNotStringTag != 0);
  // If either instance type has a non-string bit set, miss.
  __ or_(tmp3, Operand(tmp2));
  __ test(tmp3, Immediate(kIsNotStringMask));
  __ j(not_zero, &miss);

  // Fast check for identical strings.
  Label not_same;
  __ cmp(left, Operand(right));
  __ j(not_equal, &not_same, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);

  // Handle not identical strings.
  __ bind(&not_same);

  // Check that both strings are symbols. If they are, we're done
  // because we already know they are not identical.
  Label do_compare;
  STATIC_ASSERT(kSymbolTag != 0);
  __ and_(tmp1, Operand(tmp2));
  __ test(tmp1, Immediate(kIsSymbolMask));
  __ j(zero, &do_compare, Label::kNear);
  // Make sure eax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  ASSERT(right.is(eax));
  __ ret(0);

  // Check that both strings are sequential ASCII.
  Label runtime;
  __ bind(&do_compare);
  __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime);

  // Compare flat ASCII strings. Returns when done.
  StringCompareStub::GenerateFlatAsciiStringEquals(
      masm, left, right, tmp1, tmp2);

  // Handle more complex cases in runtime.
  __ bind(&runtime);
  __ pop(tmp1);  // Return address.
  __ push(left);
  __ push(right);
  __ push(tmp1);
  __ TailCallRuntime(Runtime::kStringEquals, 2, 1);

  __ bind(&miss);
  GenerateMiss(masm);
}
6226
6227
Ben Murdochb0fe1622011-05-05 13:52:32 +01006228void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
6229 ASSERT(state_ == CompareIC::OBJECTS);
Ben Murdoch257744e2011-11-30 15:57:28 +00006230 Label miss;
Ben Murdochb0fe1622011-05-05 13:52:32 +01006231 __ mov(ecx, Operand(edx));
6232 __ and_(ecx, Operand(eax));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00006233 __ JumpIfSmi(ecx, &miss, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01006234
6235 __ CmpObjectType(eax, JS_OBJECT_TYPE, ecx);
Ben Murdoch257744e2011-11-30 15:57:28 +00006236 __ j(not_equal, &miss, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01006237 __ CmpObjectType(edx, JS_OBJECT_TYPE, ecx);
Ben Murdoch257744e2011-11-30 15:57:28 +00006238 __ j(not_equal, &miss, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01006239
6240 ASSERT(GetCondition() == equal);
6241 __ sub(eax, Operand(edx));
6242 __ ret(0);
6243
6244 __ bind(&miss);
6245 GenerateMiss(masm);
6246}
6247
6248
// Shared miss handler for all CompareIC states.  Calls the CompareIC_Miss
// runtime utility (which returns the rewritten stub's code object in eax),
// restores the original operand registers, and tail-calls the new stub.
void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
  // Save the registers.  The operands are pushed below the return address
  // so they survive the runtime call.
  __ pop(ecx);
  __ push(edx);
  __ push(eax);
  __ push(ecx);

  // Call the runtime system in a fresh internal frame.
  ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss),
                                             masm->isolate());
  __ EnterInternalFrame();
  __ push(edx);
  __ push(eax);
  // Pass the current IC state so the runtime can pick the next state.
  __ push(Immediate(Smi::FromInt(op_)));
  __ CallExternalReference(miss, 3);
  __ LeaveInternalFrame();

  // Compute the entry point of the rewritten stub.
  __ lea(edi, FieldOperand(eax, Code::kHeaderSize));

  // Restore registers.
  __ pop(ecx);
  __ pop(eax);
  __ pop(edx);
  __ push(ecx);

  // Do a tail call to the rewritten stub.
  __ jmp(Operand(edi));
}
6278
6279
Ben Murdoch257744e2011-11-30 15:57:28 +00006280// Helper function used to check that the dictionary doesn't contain
6281// the property. This function may return false negatives, so miss_label
6282// must always call a backup property check that is complete.
6283// This function is safe to call if the receiver has fast properties.
6284// Name must be a symbol and receiver must be a heap object.
6285MaybeObject* StringDictionaryLookupStub::GenerateNegativeLookup(
6286 MacroAssembler* masm,
6287 Label* miss,
6288 Label* done,
6289 Register properties,
6290 String* name,
6291 Register r0) {
6292 ASSERT(name->IsSymbol());
6293
6294 // If names of slots in range from 1 to kProbes - 1 for the hash value are
6295 // not equal to the name and kProbes-th slot is not used (its name is the
6296 // undefined value), it guarantees the hash table doesn't contain the
6297 // property. It's true even if some slots represent deleted properties
6298 // (their names are the null value).
6299 for (int i = 0; i < kInlinedProbes; i++) {
6300 // Compute the masked index: (hash + i + i * i) & mask.
6301 Register index = r0;
6302 // Capacity is smi 2^n.
6303 __ mov(index, FieldOperand(properties, kCapacityOffset));
6304 __ dec(index);
6305 __ and_(Operand(index),
6306 Immediate(Smi::FromInt(name->Hash() +
6307 StringDictionary::GetProbeOffset(i))));
6308
6309 // Scale the index by multiplying by the entry size.
6310 ASSERT(StringDictionary::kEntrySize == 3);
6311 __ lea(index, Operand(index, index, times_2, 0)); // index *= 3.
6312 Register entity_name = r0;
6313 // Having undefined at this place means the name is not contained.
6314 ASSERT_EQ(kSmiTagSize, 1);
6315 __ mov(entity_name, Operand(properties, index, times_half_pointer_size,
6316 kElementsStartOffset - kHeapObjectTag));
6317 __ cmp(entity_name, masm->isolate()->factory()->undefined_value());
6318 __ j(equal, done);
6319
6320 // Stop if found the property.
6321 __ cmp(entity_name, Handle<String>(name));
6322 __ j(equal, miss);
6323
6324 // Check if the entry name is not a symbol.
6325 __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
6326 __ test_b(FieldOperand(entity_name, Map::kInstanceTypeOffset),
6327 kIsSymbolMask);
6328 __ j(zero, miss);
6329 }
6330
6331 StringDictionaryLookupStub stub(properties,
6332 r0,
6333 r0,
6334 StringDictionaryLookupStub::NEGATIVE_LOOKUP);
6335 __ push(Immediate(Handle<Object>(name)));
6336 __ push(Immediate(name->Hash()));
6337 MaybeObject* result = masm->TryCallStub(&stub);
6338 if (result->IsFailure()) return result;
6339 __ test(r0, Operand(r0));
6340 __ j(not_zero, miss);
6341 __ jmp(done);
6342 return result;
6343}
6344
6345
// Probe the string dictionary in the |elements| register. Jump to the
// |done| label if a property with the given name is found leaving the
// index into the dictionary in |r0|. Jump to the |miss| label
// otherwise.
void StringDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
                                                        Label* miss,
                                                        Label* done,
                                                        Register elements,
                                                        Register name,
                                                        Register r0,
                                                        Register r1) {
  // Assert that name contains a string.
  if (FLAG_debug_code) __ AbortIfNotString(name);

  // r1 = capacity - 1, used as the probe mask (capacity is a power of two).
  __ mov(r1, FieldOperand(elements, kCapacityOffset));
  __ shr(r1, kSmiTagSize);  // convert smi to int
  __ dec(r1);

  // Generate an unrolled loop that performs a few probes before
  // giving up. Measurements done on Gmail indicate that 2 probes
  // cover ~93% of loads from dictionaries.
  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ mov(r0, FieldOperand(name, String::kHashFieldOffset));
    __ shr(r0, String::kHashShift);
    if (i > 0) {
      __ add(Operand(r0), Immediate(StringDictionary::GetProbeOffset(i)));
    }
    __ and_(r0, Operand(r1));

    // Scale the index by multiplying by the entry size.
    ASSERT(StringDictionary::kEntrySize == 3);
    __ lea(r0, Operand(r0, r0, times_2, 0));  // r0 = r0 * 3

    // Check if the key is identical to the name.
    __ cmp(name, Operand(elements,
                         r0,
                         times_4,
                         kElementsStartOffset - kHeapObjectTag));
    __ j(equal, done);
  }

  // Inlined probes did not find the key; fall back to the full lookup
  // stub, which probes the remaining slots.
  StringDictionaryLookupStub stub(elements,
                                  r1,
                                  r0,
                                  POSITIVE_LOOKUP);
  __ push(name);
  __ mov(r0, FieldOperand(name, String::kHashFieldOffset));
  __ shr(r0, String::kHashShift);
  __ push(r0);
  __ CallStub(&stub);

  // The stub returns zero in r1 when the lookup failed.
  __ test(r1, Operand(r1));
  __ j(zero, miss);
  __ jmp(done);
}
6402
6403
// Full (non-inlined) dictionary probe loop, entered when the inlined probes
// in Generate{Negative,Positive}Lookup were inconclusive.  Probes slots
// kInlinedProbes..kTotalProbes-1.
void StringDictionaryLookupStub::Generate(MacroAssembler* masm) {
  // Stack frame on entry:
  //  esp[0 * kPointerSize]: return address.
  //  esp[1 * kPointerSize]: key's hash.
  //  esp[2 * kPointerSize]: key.
  // Registers:
  //  dictionary_: StringDictionary to probe.
  //  result_: used as scratch.
  //  index_: will hold an index of entry if lookup is successful.
  //          might alias with result_.
  // Returns:
  //  result_ is zero if lookup failed, non zero otherwise.

  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;

  Register scratch = result_;

  // Push the untagged capacity mask (capacity - 1).  Note this shifts all
  // stack offsets by one slot: hash is now at esp[2], key at esp[3].
  __ mov(scratch, FieldOperand(dictionary_, kCapacityOffset));
  __ dec(scratch);
  __ SmiUntag(scratch);
  __ push(scratch);

  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the null value).
  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ mov(scratch, Operand(esp, 2 * kPointerSize));  // key's hash
    if (i > 0) {
      __ add(Operand(scratch),
             Immediate(StringDictionary::GetProbeOffset(i)));
    }
    __ and_(scratch, Operand(esp, 0));  // mask pushed above

    // Scale the index by multiplying by the entry size.
    ASSERT(StringDictionary::kEntrySize == 3);
    __ lea(index_, Operand(scratch, scratch, times_2, 0));  // index *= 3.

    // Having undefined at this place means the name is not contained.
    ASSERT_EQ(kSmiTagSize, 1);
    __ mov(scratch, Operand(dictionary_,
                            index_,
                            times_pointer_size,
                            kElementsStartOffset - kHeapObjectTag));
    __ cmp(scratch, masm->isolate()->factory()->undefined_value());
    __ j(equal, &not_in_dictionary);

    // Stop if found the property.
    __ cmp(scratch, Operand(esp, 3 * kPointerSize));  // key
    __ j(equal, &in_dictionary);

    if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) {
      // If we hit a non symbol key during negative lookup
      // we have to bailout as this key might be equal to the
      // key we are looking for.

      // Check if the entry name is not a symbol.
      __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
      __ test_b(FieldOperand(scratch, Map::kInstanceTypeOffset),
                kIsSymbolMask);
      __ j(zero, &maybe_in_dictionary);
    }
  }

  __ bind(&maybe_in_dictionary);
  // If we are doing negative lookup then probing failure should be
  // treated as a lookup success. For positive lookup probing failure
  // should be treated as lookup failure.
  if (mode_ == POSITIVE_LOOKUP) {
    __ mov(result_, Immediate(0));
    __ Drop(1);  // Pop the capacity mask pushed on entry.
    __ ret(2 * kPointerSize);
  }

  __ bind(&in_dictionary);
  __ mov(result_, Immediate(1));
  __ Drop(1);  // Pop the capacity mask pushed on entry.
  __ ret(2 * kPointerSize);

  __ bind(&not_in_dictionary);
  __ mov(result_, Immediate(0));
  __ Drop(1);  // Pop the capacity mask pushed on entry.
  __ ret(2 * kPointerSize);
}
6490
6491
Kristian Monsen80d68ea2010-09-08 11:05:35 +01006492#undef __
6493
6494} } // namespace v8::internal
6495
6496#endif // V8_TARGET_ARCH_IA32