// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "bootstrapper.h"
#include "code-stubs.h"
#include "isolate.h"
#include "jsregexp.h"
#include "regexp-macro-assembler.h"

namespace v8 {
namespace internal {

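// In this file "__" is shorthand for ACCESS_MASM(masm), so each stub body
// below reads as a linear sequence of ia32 macro-assembler calls.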
#define __ ACCESS_MASM(masm)

void ToNumberStub::Generate(MacroAssembler* masm) {
  // The ToNumber stub takes one argument in eax.
  Label check_heap_number, call_builtin;
  __ JumpIfNotSmi(eax, &check_heap_number, Label::kNear);
  __ ret(0);

  __ bind(&check_heap_number);
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  Factory* factory = masm->isolate()->factory();
  __ cmp(Operand(ebx), Immediate(factory->heap_number_map()));
  __ j(not_equal, &call_builtin, Label::kNear);
  __ ret(0);

  __ bind(&call_builtin);
  __ pop(ecx);  // Pop return address.
  __ push(eax);
  __ push(ecx);  // Push return address.
  __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION);
}


void FastNewClosureStub::Generate(MacroAssembler* masm) {
  // Create a new closure from the given function info in new
  // space. Set the context to the current context in esi.
  Label gc;
  __ AllocateInNewSpace(JSFunction::kSize, eax, ebx, ecx, &gc, TAG_OBJECT);

  // Get the function info from the stack.
  __ mov(edx, Operand(esp, 1 * kPointerSize));

  int map_index = strict_mode_ == kStrictMode
      ? Context::STRICT_MODE_FUNCTION_MAP_INDEX
      : Context::FUNCTION_MAP_INDEX;

  // Compute the function map in the current global context and set that
  // as the map of the allocated object.
  __ mov(ecx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalContextOffset));
  __ mov(ecx, Operand(ecx, Context::SlotOffset(map_index)));
  __ mov(FieldOperand(eax, JSObject::kMapOffset), ecx);

  // Initialize the rest of the function. We don't have to update the
  // write barrier because the allocated object is in new space.
  Factory* factory = masm->isolate()->factory();
  __ mov(ebx, Immediate(factory->empty_fixed_array()));
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
  __ mov(FieldOperand(eax, JSFunction::kPrototypeOrInitialMapOffset),
         Immediate(factory->the_hole_value()));
  __ mov(FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset), edx);
  __ mov(FieldOperand(eax, JSFunction::kContextOffset), esi);
  __ mov(FieldOperand(eax, JSFunction::kLiteralsOffset), ebx);
  __ mov(FieldOperand(eax, JSFunction::kNextFunctionLinkOffset),
         Immediate(factory->undefined_value()));

  // Initialize the code pointer in the function to be the one
  // found in the shared function info object.
  __ mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
  __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
  __ mov(FieldOperand(eax, JSFunction::kCodeEntryOffset), edx);

  // Return and remove the on-stack parameter.
  __ ret(1 * kPointerSize);

  // Create a new closure through the slower runtime call.
  __ bind(&gc);
  __ pop(ecx);  // Temporarily remove return address.
  __ pop(edx);
  __ push(esi);
  __ push(edx);
  __ push(Immediate(factory->false_value()));
  __ push(ecx);  // Restore return address.
  __ TailCallRuntime(Runtime::kNewClosure, 3, 1);
}


void FastNewContextStub::Generate(MacroAssembler* masm) {
  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
  __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize,
                        eax, ebx, ecx, &gc, TAG_OBJECT);

  // Get the function from the stack.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));

  // Set up the object header.
  Factory* factory = masm->isolate()->factory();
  __ mov(FieldOperand(eax, HeapObject::kMapOffset),
         factory->function_context_map());
  __ mov(FieldOperand(eax, Context::kLengthOffset),
         Immediate(Smi::FromInt(length)));

  // Set up the fixed slots.
  __ Set(ebx, Immediate(0));  // Set to NULL.
  __ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx);
  __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), esi);
  __ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx);

  // Copy the global object from the previous context.
  __ mov(ebx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ mov(Operand(eax, Context::SlotOffset(Context::GLOBAL_INDEX)), ebx);

  // Initialize the rest of the slots to undefined.
  __ mov(ebx, factory->undefined_value());
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
    __ mov(Operand(eax, Context::SlotOffset(i)), ebx);
  }

  // Return and remove the on-stack parameter.
  __ mov(esi, Operand(eax));
  __ ret(1 * kPointerSize);

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1);
}


void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [esp + kPointerSize]: constant elements.
  // [esp + (2 * kPointerSize)]: literal index.
  // [esp + (3 * kPointerSize)]: literals array.

  // All sizes here are multiples of kPointerSize.
  int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
  int size = JSArray::kSize + elements_size;

  // Load boilerplate object into ecx and check if we need to create a
  // boilerplate.
  Label slow_case;
  __ mov(ecx, Operand(esp, 3 * kPointerSize));
  __ mov(eax, Operand(esp, 2 * kPointerSize));
  STATIC_ASSERT(kPointerSize == 4);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);
  __ mov(ecx, FieldOperand(ecx, eax, times_half_pointer_size,
                           FixedArray::kHeaderSize));
  Factory* factory = masm->isolate()->factory();
  __ cmp(ecx, factory->undefined_value());
  __ j(equal, &slow_case);

  if (FLAG_debug_code) {
    const char* message;
    Handle<Map> expected_map;
    if (mode_ == CLONE_ELEMENTS) {
      message = "Expected (writable) fixed array";
      expected_map = factory->fixed_array_map();
    } else {
      ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS);
      message = "Expected copy-on-write fixed array";
      expected_map = factory->fixed_cow_array_map();
    }
    __ push(ecx);
    __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
    __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), expected_map);
    __ Assert(equal, message);
    __ pop(ecx);
  }

  // Allocate both the JS array and the elements array in one big
  // allocation. This avoids multiple limit checks.
  __ AllocateInNewSpace(size, eax, ebx, edx, &slow_case, TAG_OBJECT);

  // Copy the JS array part.
  for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
    if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
      __ mov(ebx, FieldOperand(ecx, i));
      __ mov(FieldOperand(eax, i), ebx);
    }
  }

  if (length_ > 0) {
    // Get hold of the elements array of the boilerplate and set up the
    // elements pointer in the resulting object.
    __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
    __ lea(edx, Operand(eax, JSArray::kSize));
    __ mov(FieldOperand(eax, JSArray::kElementsOffset), edx);

    // Copy the elements array.
    for (int i = 0; i < elements_size; i += kPointerSize) {
      __ mov(ebx, FieldOperand(ecx, i));
      __ mov(FieldOperand(edx, i), ebx);
    }
  }

  // Return and remove the on-stack parameters.
  __ ret(3 * kPointerSize);

  __ bind(&slow_case);
  __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
}


// The stub expects its argument on the stack and returns its result in tos_:
// zero for false, and a non-zero value for true.
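// The types_ set records which argument types this stub instance handles;
// an argument of any other type falls through to the patch label below,
// where GenerateTypeTransition installs a more general stub.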
void ToBooleanStub::Generate(MacroAssembler* masm) {
  Label patch;
  Factory* factory = masm->isolate()->factory();
  const Register argument = eax;
  const Register map = edx;

  if (!types_.IsEmpty()) {
    __ mov(argument, Operand(esp, 1 * kPointerSize));
  }

  // undefined -> false
  CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false);

  // Boolean -> its value
  CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false);
  CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true);

  // 'null' -> false.
  CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false);

  if (types_.Contains(SMI)) {
    // Smis: 0 -> false, all other -> true
    Label not_smi;
    __ JumpIfNotSmi(argument, &not_smi, Label::kNear);
    // argument contains the correct return value already.
    if (!tos_.is(argument)) {
      __ mov(tos_, argument);
    }
    __ ret(1 * kPointerSize);
    __ bind(&not_smi);
  } else if (types_.NeedsMap()) {
    // If we need a map later and have a Smi -> patch.
    __ JumpIfSmi(argument, &patch, Label::kNear);
  }

  if (types_.NeedsMap()) {
    __ mov(map, FieldOperand(argument, HeapObject::kMapOffset));

    if (types_.CanBeUndetectable()) {
      __ test_b(FieldOperand(map, Map::kBitFieldOffset),
                1 << Map::kIsUndetectable);
      // Undetectable -> false.
      Label not_undetectable;
      __ j(zero, &not_undetectable, Label::kNear);
      __ Set(tos_, Immediate(0));
      __ ret(1 * kPointerSize);
      __ bind(&not_undetectable);
    }
  }

  if (types_.Contains(SPEC_OBJECT)) {
    // spec object -> true.
    Label not_js_object;
    __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
    __ j(below, &not_js_object, Label::kNear);
    // argument contains the correct return value already.
    if (!tos_.is(argument)) {
      __ Set(tos_, Immediate(1));
    }
    __ ret(1 * kPointerSize);
    __ bind(&not_js_object);
  }

  if (types_.Contains(STRING)) {
    // String value -> false iff empty.
    Label not_string;
    __ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
    __ j(above_equal, &not_string, Label::kNear);
    __ mov(tos_, FieldOperand(argument, String::kLengthOffset));
    __ ret(1 * kPointerSize);  // the string length is OK as the return value
    __ bind(&not_string);
  }

  if (types_.Contains(HEAP_NUMBER)) {
    // heap number -> false iff +0, -0, or NaN.
    Label not_heap_number, false_result;
    __ cmp(map, factory->heap_number_map());
    __ j(not_equal, &not_heap_number, Label::kNear);
    __ fldz();
    __ fld_d(FieldOperand(argument, HeapNumber::kValueOffset));
    __ FCmp();
    __ j(zero, &false_result, Label::kNear);
    // argument contains the correct return value already.
    if (!tos_.is(argument)) {
      __ Set(tos_, Immediate(1));
    }
    __ ret(1 * kPointerSize);
    __ bind(&false_result);
    __ Set(tos_, Immediate(0));
    __ ret(1 * kPointerSize);
    __ bind(&not_heap_number);
  }

  __ bind(&patch);
  GenerateTypeTransition(masm);
}


void ToBooleanStub::CheckOddball(MacroAssembler* masm,
                                 Type type,
                                 Heap::RootListIndex value,
                                 bool result) {
  const Register argument = eax;
  if (types_.Contains(type)) {
    // If we see an expected oddball, return its ToBoolean value in tos_.
    Label different_value;
    __ CompareRoot(argument, value);
    __ j(not_equal, &different_value, Label::kNear);
    if (!result) {
      // If we have to return zero, there is no way around clearing tos_.
      __ Set(tos_, Immediate(0));
    } else if (!tos_.is(argument)) {
      // If we have to return non-zero, we can re-use the argument if it is the
      // same register as the result, because we never see Smi-zero here.
      __ Set(tos_, Immediate(1));
    }
    __ ret(1 * kPointerSize);
    __ bind(&different_value);
  }
}


void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ pop(ecx);  // Get return address, operand is now on top of stack.
  __ push(Immediate(Smi::FromInt(tos_.code())));
  __ push(Immediate(Smi::FromInt(types_.ToByte())));
  __ push(ecx);  // Push return address.
  // Patch the caller to an appropriate specialized stub and return the
  // operation result to the caller of the stub.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()),
      3,
      1);
}


class FloatingPointHelper : public AllStatic {
 public:
  enum ArgLocation {
    ARGS_ON_STACK,
    ARGS_IN_REGISTERS
  };

  // Code pattern for loading a floating point value. Input value must
  // be either a smi or a heap number object (fp value). Requirements:
  // operand in register number. Returns operand as floating point number
  // on FPU stack.
  static void LoadFloatOperand(MacroAssembler* masm, Register number);

  // Code pattern for loading floating point values. Input values must
  // be either smi or heap number objects (fp values). Requirements:
  // operand_1 on TOS+1 or in edx, operand_2 on TOS+2 or in eax.
  // Returns operands as floating point numbers on FPU stack.
  static void LoadFloatOperands(MacroAssembler* masm,
                                Register scratch,
                                ArgLocation arg_location = ARGS_ON_STACK);

  // Similar to LoadFloatOperand but assumes that both operands are smis.
  // Expects operands in edx, eax.
  static void LoadFloatSmis(MacroAssembler* masm, Register scratch);

  // Test if operands are smi or number objects (fp). Requirements:
  // operand_1 in eax, operand_2 in edx; falls through on float
  // operands, jumps to the non_float label otherwise.
  static void CheckFloatOperands(MacroAssembler* masm,
                                 Label* non_float,
                                 Register scratch);

  // Checks that the two floating point numbers on top of the FPU stack
  // have int32 values.
  static void CheckFloatOperandsAreInt32(MacroAssembler* masm,
                                         Label* non_int32);

  // Takes the operands in edx and eax and loads them as integers in eax
  // and ecx.
  static void LoadUnknownsAsIntegers(MacroAssembler* masm,
                                     bool use_sse3,
                                     Label* operand_conversion_failure);

  // Must only be called after LoadUnknownsAsIntegers. Assumes that the
  // operands are pushed on the stack, and that their conversions to int32
  // are in eax and ecx. Checks that the original numbers were in the int32
  // range.
  static void CheckLoadedIntegersWereInt32(MacroAssembler* masm,
                                           bool use_sse3,
                                           Label* not_int32);

  // Assumes that operands are smis or heap numbers and loads them
  // into xmm0 and xmm1. Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm);

  // Test if operands are numbers (smi or HeapNumber objects), and load
  // them into xmm0 and xmm1 if they are. Jump to label not_numbers if
  // either operand is not a number. Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm, Label* not_numbers);

  // Similar to LoadSSE2Operands but assumes that both operands are smis.
  // Expects operands in edx, eax.
  static void LoadSSE2Smis(MacroAssembler* masm, Register scratch);

  // Checks that the two floating point numbers loaded into xmm0 and xmm1
  // have int32 values.
  static void CheckSSE2OperandsAreInt32(MacroAssembler* masm,
                                        Label* non_int32,
                                        Register scratch);
};


// Get the integer part of a heap number. Surprisingly, all this bit twiddling
// is faster than using the built-in instructions on floating point registers.
// Trashes edi and ebx. Dest is ecx. Source cannot be ecx or one of the
// trashed registers.
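// The exponent word read below is the upper half of the IEEE 754 double:
// the sign bit, the 11-bit biased exponent and the top bits of the mantissa;
// the lower half (kMantissaOffset) holds the remaining mantissa bits.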
static void IntegerConvert(MacroAssembler* masm,
                           Register source,
                           bool use_sse3,
                           Label* conversion_failure) {
  ASSERT(!source.is(ecx) && !source.is(edi) && !source.is(ebx));
  Label done, right_exponent, normal_exponent;
  Register scratch = ebx;
  Register scratch2 = edi;
  // Get exponent word.
  __ mov(scratch, FieldOperand(source, HeapNumber::kExponentOffset));
  // Get exponent alone in scratch2.
  __ mov(scratch2, scratch);
  __ and_(scratch2, HeapNumber::kExponentMask);
  if (use_sse3) {
    CpuFeatures::Scope scope(SSE3);
    // Check whether the exponent is too big for a 64 bit signed integer.
    static const uint32_t kTooBigExponent =
        (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
    __ cmp(Operand(scratch2), Immediate(kTooBigExponent));
    __ j(greater_equal, conversion_failure);
    // Load x87 register with heap number.
    __ fld_d(FieldOperand(source, HeapNumber::kValueOffset));
    // Reserve space for 64 bit answer.
    __ sub(Operand(esp), Immediate(sizeof(uint64_t)));  // Nolint.
    // Do conversion, which cannot fail because we checked the exponent.
    __ fisttp_d(Operand(esp, 0));
    __ mov(ecx, Operand(esp, 0));  // Load low word of answer into ecx.
    __ add(Operand(esp), Immediate(sizeof(uint64_t)));  // Nolint.
  } else {
    // Load ecx with zero. We use this either for the final shift or
    // for the answer.
    __ xor_(ecx, Operand(ecx));
    // Check whether the exponent matches a 32 bit signed int that cannot be
    // represented by a Smi. A non-smi 32 bit integer is 1.xxx * 2^30 so the
    // exponent is 30 (biased). This is the exponent that we are fastest at and
    // also the highest exponent we can handle here.
    const uint32_t non_smi_exponent =
        (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift;
    __ cmp(Operand(scratch2), Immediate(non_smi_exponent));
    // If we have a match of the int32-but-not-Smi exponent then skip some
    // logic.
    __ j(equal, &right_exponent, Label::kNear);
    // If the exponent is higher than that then go to slow case. This catches
    // numbers that don't fit in a signed int32, infinities and NaNs.
    __ j(less, &normal_exponent, Label::kNear);

    {
      // Handle a big exponent. The only reason we have this code is that the
      // >>> operator has a tendency to generate numbers with an exponent of 31.
      const uint32_t big_non_smi_exponent =
          (HeapNumber::kExponentBias + 31) << HeapNumber::kExponentShift;
      __ cmp(Operand(scratch2), Immediate(big_non_smi_exponent));
      __ j(not_equal, conversion_failure);
      // We have the big exponent, typically from >>>. This means the number is
      // in the range 2^31 to 2^32 - 1. Get the top bits of the mantissa.
      __ mov(scratch2, scratch);
      __ and_(scratch2, HeapNumber::kMantissaMask);
      // Put back the implicit 1.
      __ or_(scratch2, 1 << HeapNumber::kExponentShift);
      // Shift up the mantissa bits to take up the space the exponent used to
      // take. We just orred in the implicit bit so that took care of one and
      // we want to use the full unsigned range so we subtract 1 bit from the
      // shift distance.
      const int big_shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 1;
      __ shl(scratch2, big_shift_distance);
      // Get the second half of the double.
      __ mov(ecx, FieldOperand(source, HeapNumber::kMantissaOffset));
      // Shift down 21 bits to get the most significant 11 bits or the low
      // mantissa word.
      __ shr(ecx, 32 - big_shift_distance);
      __ or_(ecx, Operand(scratch2));
      // We have the answer in ecx, but we may need to negate it.
      __ test(scratch, Operand(scratch));
      __ j(positive, &done, Label::kNear);
      __ neg(ecx);
      __ jmp(&done, Label::kNear);
    }

    __ bind(&normal_exponent);
    // Exponent word in scratch, exponent part of exponent word in scratch2.
    // Zero in ecx.
    // We know the exponent is smaller than 30 (biased). If it is less than
    // 0 (biased) then the number is smaller in magnitude than 1.0 * 2^0, i.e.
    // it rounds to zero.
    const uint32_t zero_exponent =
        (HeapNumber::kExponentBias + 0) << HeapNumber::kExponentShift;
    __ sub(Operand(scratch2), Immediate(zero_exponent));
    // ecx already has a Smi zero.
    __ j(less, &done, Label::kNear);

    // We have a shifted exponent between 0 and 30 in scratch2.
    __ shr(scratch2, HeapNumber::kExponentShift);
    __ mov(ecx, Immediate(30));
    __ sub(ecx, Operand(scratch2));

    __ bind(&right_exponent);
    // Here ecx is the shift, scratch is the exponent word.
    // Get the top bits of the mantissa.
    __ and_(scratch, HeapNumber::kMantissaMask);
    // Put back the implicit 1.
    __ or_(scratch, 1 << HeapNumber::kExponentShift);
    // Shift up the mantissa bits to take up the space the exponent used to
    // take. We have kExponentShift + 1 significant bits in the low end of the
    // word. Shift them to the top bits.
    const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2;
    __ shl(scratch, shift_distance);
    // Get the second half of the double. For some exponents we don't
    // actually need this because the bits get shifted out again, but
    // it's probably slower to test than just to do it.
    __ mov(scratch2, FieldOperand(source, HeapNumber::kMantissaOffset));
    // Shift down 22 bits to get the most significant 10 bits or the low
    // mantissa word.
    __ shr(scratch2, 32 - shift_distance);
    __ or_(scratch2, Operand(scratch));
    // Move down according to the exponent.
    __ shr_cl(scratch2);
    // Now the unsigned answer is in scratch2. We need to move it to ecx and
    // we may need to fix the sign.
    Label negative;
    __ xor_(ecx, Operand(ecx));
    __ cmp(ecx, FieldOperand(source, HeapNumber::kExponentOffset));
    __ j(greater, &negative, Label::kNear);
    __ mov(ecx, scratch2);
    __ jmp(&done, Label::kNear);
    __ bind(&negative);
    __ sub(ecx, Operand(scratch2));
    __ bind(&done);
  }
}


void UnaryOpStub::PrintName(StringStream* stream) {
  const char* op_name = Token::Name(op_);
  const char* overwrite_name = NULL;  // Make g++ happy.
  switch (mode_) {
    case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
    case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
  }
  stream->Add("UnaryOpStub_%s_%s_%s",
              op_name,
              overwrite_name,
              UnaryOpIC::GetName(operand_type_));
}


// TODO(svenpanne): Use virtual functions instead of switch.
void UnaryOpStub::Generate(MacroAssembler* masm) {
  switch (operand_type_) {
    case UnaryOpIC::UNINITIALIZED:
      GenerateTypeTransition(masm);
      break;
    case UnaryOpIC::SMI:
      GenerateSmiStub(masm);
      break;
    case UnaryOpIC::HEAP_NUMBER:
      GenerateHeapNumberStub(masm);
      break;
    case UnaryOpIC::GENERIC:
      GenerateGenericStub(masm);
      break;
  }
}


void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ pop(ecx);  // Save return address.

  __ push(eax);  // the operand
  __ push(Immediate(Smi::FromInt(op_)));
  __ push(Immediate(Smi::FromInt(mode_)));
  __ push(Immediate(Smi::FromInt(operand_type_)));

  __ push(ecx);  // Push return address.

  // Patch the caller to an appropriate specialized stub and return the
  // operation result to the caller of the stub.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kUnaryOp_Patch), masm->isolate()), 4, 1);
}


// TODO(svenpanne): Use virtual functions instead of switch.
void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
  switch (op_) {
    case Token::SUB:
      GenerateSmiStubSub(masm);
      break;
    case Token::BIT_NOT:
      GenerateSmiStubBitNot(masm);
      break;
    default:
      UNREACHABLE();
  }
}


void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) {
  Label non_smi, undo, slow;
  GenerateSmiCodeSub(masm, &non_smi, &undo, &slow,
                     Label::kNear, Label::kNear, Label::kNear);
  __ bind(&undo);
  GenerateSmiCodeUndo(masm);
  __ bind(&non_smi);
  __ bind(&slow);
  GenerateTypeTransition(masm);
}


void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) {
  Label non_smi;
  GenerateSmiCodeBitNot(masm, &non_smi);
  __ bind(&non_smi);
  GenerateTypeTransition(masm);
}


void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
                                     Label* non_smi,
                                     Label* undo,
                                     Label* slow,
                                     Label::Distance non_smi_near,
                                     Label::Distance undo_near,
                                     Label::Distance slow_near) {
  // Check whether the value is a smi.
  __ JumpIfNotSmi(eax, non_smi, non_smi_near);

  // We can't handle -0 with smis, so use a type transition for that case.
  __ test(eax, Operand(eax));
  __ j(zero, slow, slow_near);

  // Try optimistic subtraction '0 - value', saving operand in eax for undo.
  __ mov(edx, Operand(eax));
  __ Set(eax, Immediate(0));
  __ sub(eax, Operand(edx));
  __ j(overflow, undo, undo_near);
  __ ret(0);
}


void UnaryOpStub::GenerateSmiCodeBitNot(
    MacroAssembler* masm,
    Label* non_smi,
    Label::Distance non_smi_near) {
  // Check whether the value is a smi.
  __ JumpIfNotSmi(eax, non_smi, non_smi_near);

  // Flip bits and revert inverted smi-tag.
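  // A smi stores its value shifted left by one with a 0 tag bit, so notting
  // the whole word both complements the payload and sets the tag bit to 1;
  // clearing the tag bit again yields the correctly tagged smi of ~value.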
  __ not_(eax);
  __ and_(eax, ~kSmiTagMask);
  __ ret(0);
}


void UnaryOpStub::GenerateSmiCodeUndo(MacroAssembler* masm) {
  __ mov(eax, Operand(edx));
}


// TODO(svenpanne): Use virtual functions instead of switch.
void UnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
  switch (op_) {
    case Token::SUB:
      GenerateHeapNumberStubSub(masm);
      break;
    case Token::BIT_NOT:
      GenerateHeapNumberStubBitNot(masm);
      break;
    default:
      UNREACHABLE();
  }
}


void UnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) {
  Label non_smi, undo, slow, call_builtin;
  GenerateSmiCodeSub(masm, &non_smi, &undo, &call_builtin, Label::kNear);
  __ bind(&non_smi);
  GenerateHeapNumberCodeSub(masm, &slow);
  __ bind(&undo);
  GenerateSmiCodeUndo(masm);
  __ bind(&slow);
  GenerateTypeTransition(masm);
  __ bind(&call_builtin);
  GenerateGenericCodeFallback(masm);
}


void UnaryOpStub::GenerateHeapNumberStubBitNot(
    MacroAssembler* masm) {
  Label non_smi, slow;
  GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
  __ bind(&non_smi);
  GenerateHeapNumberCodeBitNot(masm, &slow);
  __ bind(&slow);
  GenerateTypeTransition(masm);
}


void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
                                            Label* slow) {
  __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(edx, masm->isolate()->factory()->heap_number_map());
  __ j(not_equal, slow);

  if (mode_ == UNARY_OVERWRITE) {
    __ xor_(FieldOperand(eax, HeapNumber::kExponentOffset),
            Immediate(HeapNumber::kSignMask));  // Flip sign.
  } else {
    __ mov(edx, Operand(eax));
    // edx: operand

    Label slow_allocate_heapnumber, heapnumber_allocated;
    __ AllocateHeapNumber(eax, ebx, ecx, &slow_allocate_heapnumber);
    __ jmp(&heapnumber_allocated, Label::kNear);

    __ bind(&slow_allocate_heapnumber);
    __ EnterInternalFrame();
    __ push(edx);
    __ CallRuntime(Runtime::kNumberAlloc, 0);
    __ pop(edx);
    __ LeaveInternalFrame();

    __ bind(&heapnumber_allocated);
    // eax: allocated 'empty' number
    __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
    __ xor_(ecx, HeapNumber::kSignMask);  // Flip sign.
    __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ecx);
    __ mov(ecx, FieldOperand(edx, HeapNumber::kMantissaOffset));
    __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx);
  }
  __ ret(0);
}


void UnaryOpStub::GenerateHeapNumberCodeBitNot(MacroAssembler* masm,
                                               Label* slow) {
  __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(edx, masm->isolate()->factory()->heap_number_map());
  __ j(not_equal, slow);

  // Convert the heap number in eax to an untagged integer in ecx.
  IntegerConvert(masm, eax, CpuFeatures::IsSupported(SSE3), slow);

  // Do the bitwise operation and check if the result fits in a smi.
  Label try_float;
  __ not_(ecx);
  __ cmp(ecx, 0xc0000000);
  __ j(sign, &try_float, Label::kNear);

  // Tag the result as a smi and we're done.
  STATIC_ASSERT(kSmiTagSize == 1);
  __ lea(eax, Operand(ecx, times_2, kSmiTag));
  __ ret(0);

  // Try to store the result in a heap number.
  __ bind(&try_float);
  if (mode_ == UNARY_NO_OVERWRITE) {
    Label slow_allocate_heapnumber, heapnumber_allocated;
    __ mov(ebx, eax);
    __ AllocateHeapNumber(eax, edx, edi, &slow_allocate_heapnumber);
    __ jmp(&heapnumber_allocated);

    __ bind(&slow_allocate_heapnumber);
    __ EnterInternalFrame();
    // Push the original HeapNumber on the stack. The integer value can't
    // be stored since it's untagged and not in the smi range (so we can't
    // smi-tag it). We'll recalculate the value after the GC instead.
    __ push(ebx);
    __ CallRuntime(Runtime::kNumberAlloc, 0);
    // New HeapNumber is in eax.
    __ pop(edx);
    __ LeaveInternalFrame();
    // IntegerConvert uses ebx and edi as scratch registers.
    // This conversion won't go slow-case.
    IntegerConvert(masm, edx, CpuFeatures::IsSupported(SSE3), slow);
    __ not_(ecx);

    __ bind(&heapnumber_allocated);
  }
  if (CpuFeatures::IsSupported(SSE2)) {
    CpuFeatures::Scope use_sse2(SSE2);
    __ cvtsi2sd(xmm0, Operand(ecx));
    __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
  } else {
    __ push(ecx);
    __ fild_s(Operand(esp, 0));
    __ pop(ecx);
    __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
  }
  __ ret(0);
}


// TODO(svenpanne): Use virtual functions instead of switch.
void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) {
  switch (op_) {
    case Token::SUB:
      GenerateGenericStubSub(masm);
      break;
    case Token::BIT_NOT:
      GenerateGenericStubBitNot(masm);
      break;
    default:
      UNREACHABLE();
  }
}


void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) {
  Label non_smi, undo, slow;
  GenerateSmiCodeSub(masm, &non_smi, &undo, &slow, Label::kNear);
  __ bind(&non_smi);
  GenerateHeapNumberCodeSub(masm, &slow);
  __ bind(&undo);
  GenerateSmiCodeUndo(masm);
  __ bind(&slow);
  GenerateGenericCodeFallback(masm);
}


void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) {
  Label non_smi, slow;
  GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
  __ bind(&non_smi);
  GenerateHeapNumberCodeBitNot(masm, &slow);
  __ bind(&slow);
  GenerateGenericCodeFallback(masm);
}


void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) {
  // Handle the slow case by jumping to the corresponding JavaScript builtin.
  __ pop(ecx);  // pop return address.
  __ push(eax);
  __ push(ecx);  // push return address
  switch (op_) {
    case Token::SUB:
      __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
      break;
    case Token::BIT_NOT:
      __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}


void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ pop(ecx);  // Save return address.
  __ push(edx);
  __ push(eax);
  // Left and right arguments are now on top.
  // Push this stub's key. Although the operation and the type info are
  // encoded into the key, the encoding is opaque, so push them too.
  __ push(Immediate(Smi::FromInt(MinorKey())));
  __ push(Immediate(Smi::FromInt(op_)));
  __ push(Immediate(Smi::FromInt(operands_type_)));

  __ push(ecx);  // Push return address.

  // Patch the caller to an appropriate specialized stub and return the
  // operation result to the caller of the stub.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kBinaryOp_Patch),
                        masm->isolate()),
      5,
      1);
}


// Prepare for a type transition runtime call when the args are already on
// the stack, under the return address.
void BinaryOpStub::GenerateTypeTransitionWithSavedArgs(MacroAssembler* masm) {
  __ pop(ecx);  // Save return address.
  // Left and right arguments are already on top of the stack.
  // Push this stub's key. Although the operation and the type info are
  // encoded into the key, the encoding is opaque, so push them too.
  __ push(Immediate(Smi::FromInt(MinorKey())));
  __ push(Immediate(Smi::FromInt(op_)));
  __ push(Immediate(Smi::FromInt(operands_type_)));

  __ push(ecx);  // Push return address.

  // Patch the caller to an appropriate specialized stub and return the
  // operation result to the caller of the stub.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kBinaryOp_Patch),
                        masm->isolate()),
      5,
      1);
}


void BinaryOpStub::Generate(MacroAssembler* masm) {
  switch (operands_type_) {
    case BinaryOpIC::UNINITIALIZED:
      GenerateTypeTransition(masm);
      break;
    case BinaryOpIC::SMI:
      GenerateSmiStub(masm);
      break;
    case BinaryOpIC::INT32:
      GenerateInt32Stub(masm);
      break;
    case BinaryOpIC::HEAP_NUMBER:
      GenerateHeapNumberStub(masm);
      break;
    case BinaryOpIC::ODDBALL:
      GenerateOddballStub(masm);
      break;
    case BinaryOpIC::BOTH_STRING:
      GenerateBothStringStub(masm);
      break;
    case BinaryOpIC::STRING:
      GenerateStringStub(masm);
      break;
    case BinaryOpIC::GENERIC:
      GenerateGeneric(masm);
      break;
    default:
      UNREACHABLE();
  }
}


void BinaryOpStub::PrintName(StringStream* stream) {
  const char* op_name = Token::Name(op_);
  const char* overwrite_name;
  switch (mode_) {
    case NO_OVERWRITE: overwrite_name = "Alloc"; break;
    case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
    case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
    default: overwrite_name = "UnknownOverwrite"; break;
  }
  stream->Add("BinaryOpStub_%s_%s_%s",
              op_name,
              overwrite_name,
              BinaryOpIC::GetName(operands_type_));
}


void BinaryOpStub::GenerateSmiCode(
    MacroAssembler* masm,
    Label* slow,
    SmiCodeGenerateHeapNumberResults allow_heapnumber_results) {
  // 1. Move arguments into edx, eax except for DIV and MOD, which need the
  // dividend in eax and edx free for the division. Use eax, ebx for those.
  Comment load_comment(masm, "-- Load arguments");
  Register left = edx;
  Register right = eax;
  if (op_ == Token::DIV || op_ == Token::MOD) {
    left = eax;
    right = ebx;
    __ mov(ebx, eax);
    __ mov(eax, edx);
  }


  // 2. Prepare the smi check of both operands by oring them together.
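  // Since a smi has a zero tag bit in the least significant position, the
  // bitwise or of two words has a zero tag bit iff both operands are smis,
  // so a single JumpIfNotSmi on the combined value checks both at once.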
  Comment smi_check_comment(masm, "-- Smi check arguments");
  Label not_smis;
  Register combined = ecx;
  ASSERT(!left.is(combined) && !right.is(combined));
  switch (op_) {
    case Token::BIT_OR:
      // Perform the operation into eax and smi check the result. Preserve
      // eax in case the result is not a smi.
      ASSERT(!left.is(ecx) && !right.is(ecx));
      __ mov(ecx, right);
      __ or_(right, Operand(left));  // Bitwise or is commutative.
      combined = right;
      break;

    case Token::BIT_XOR:
    case Token::BIT_AND:
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV:
    case Token::MOD:
      __ mov(combined, right);
      __ or_(combined, Operand(left));
      break;

    case Token::SHL:
    case Token::SAR:
    case Token::SHR:
      // Move the right operand into ecx for the shift operation, use eax
      // for the smi check register.
      ASSERT(!left.is(ecx) && !right.is(ecx));
      __ mov(ecx, right);
      __ or_(right, Operand(left));
      combined = right;
      break;

    default:
      break;
  }

  // 3. Perform the smi check of the operands.
  STATIC_ASSERT(kSmiTag == 0);  // Adjust zero check if not the case.
  __ JumpIfNotSmi(combined, &not_smis);

  // 4. Operands are both smis, perform the operation leaving the result in
  // eax and check the result if necessary.
  Comment perform_smi(masm, "-- Perform smi operation");
  Label use_fp_on_smis;
  switch (op_) {
    case Token::BIT_OR:
      // Nothing to do.
      break;

    case Token::BIT_XOR:
      ASSERT(right.is(eax));
      __ xor_(right, Operand(left));  // Bitwise xor is commutative.
      break;

    case Token::BIT_AND:
      ASSERT(right.is(eax));
      __ and_(right, Operand(left));  // Bitwise and is commutative.
      break;

    case Token::SHL:
      // Remove tags from operands (but keep sign).
      __ SmiUntag(left);
      __ SmiUntag(ecx);
      // Perform the operation.
      __ shl_cl(left);
      // Check that the *signed* result fits in a smi.
      __ cmp(left, 0xc0000000);
      __ j(sign, &use_fp_on_smis);
      // Tag the result and store it in register eax.
      __ SmiTag(left);
      __ mov(eax, left);
      break;

    case Token::SAR:
      // Remove tags from operands (but keep sign).
      __ SmiUntag(left);
      __ SmiUntag(ecx);
      // Perform the operation.
      __ sar_cl(left);
      // Tag the result and store it in register eax.
      __ SmiTag(left);
      __ mov(eax, left);
      break;

    case Token::SHR:
      // Remove tags from operands (but keep sign).
      __ SmiUntag(left);
      __ SmiUntag(ecx);
      // Perform the operation.
      __ shr_cl(left);
      // Check that the *unsigned* result fits in a smi.
      // Neither of the two high-order bits can be set:
      // - 0x80000000: high bit would be lost when smi tagging.
      // - 0x40000000: this number would convert to negative when Smi tagging.
      // These two cases can only happen with shifts by 0 or 1 when handed a
      // valid smi.
      __ test(left, Immediate(0xc0000000));
      __ j(not_zero, &use_fp_on_smis);
      // Tag the result and store it in register eax.
      __ SmiTag(left);
      __ mov(eax, left);
      break;

    case Token::ADD:
      ASSERT(right.is(eax));
      __ add(right, Operand(left));  // Addition is commutative.
      __ j(overflow, &use_fp_on_smis);
      break;

    case Token::SUB:
      __ sub(left, Operand(right));
      __ j(overflow, &use_fp_on_smis);
      __ mov(eax, left);
      break;

    case Token::MUL:
      // If the smi tag is 0 we can just leave the tag on one operand.
      STATIC_ASSERT(kSmiTag == 0);  // Adjust code below if not the case.
      // We can't revert the multiplication if the result is not a smi
      // so save the right operand.
      __ mov(ebx, right);
      // Remove tag from one of the operands (but keep sign).
      __ SmiUntag(right);
      // Do multiplication.
      __ imul(right, Operand(left));  // Multiplication is commutative.
      __ j(overflow, &use_fp_on_smis);
      // Check for negative zero result. Use combined = left | right.
      __ NegativeZeroTest(right, combined, &use_fp_on_smis);
      break;

    case Token::DIV:
      // We can't revert the division if the result is not a smi so
      // save the left operand.
      __ mov(edi, left);
      // Check for 0 divisor.
      __ test(right, Operand(right));
      __ j(zero, &use_fp_on_smis);
      // Sign extend left into edx:eax.
      ASSERT(left.is(eax));
      __ cdq();
      // Divide edx:eax by right.
      __ idiv(right);
      // Check for the corner case of dividing the most negative smi by
      // -1. We cannot use the overflow flag, since it is not set by idiv
      // instruction.
      STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
      __ cmp(eax, 0x40000000);
      __ j(equal, &use_fp_on_smis);
      // Check for negative zero result. Use combined = left | right.
      __ NegativeZeroTest(eax, combined, &use_fp_on_smis);
      // Check that the remainder is zero.
      __ test(edx, Operand(edx));
      __ j(not_zero, &use_fp_on_smis);
      // Tag the result and store it in register eax.
      __ SmiTag(eax);
      break;

    case Token::MOD:
      // Check for 0 divisor.
      __ test(right, Operand(right));
      __ j(zero, &not_smis);

      // Sign extend left into edx:eax.
      ASSERT(left.is(eax));
      __ cdq();
      // Divide edx:eax by right.
      __ idiv(right);
      // Check for negative zero result. Use combined = left | right.
      __ NegativeZeroTest(edx, combined, slow);
      // Move remainder to register eax.
      __ mov(eax, edx);
      break;

    default:
      UNREACHABLE();
  }

  // 5. Emit return of result in eax. Some operations have registers pushed.
  switch (op_) {
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV:
      __ ret(0);
      break;
    case Token::MOD:
    case Token::BIT_OR:
    case Token::BIT_AND:
    case Token::BIT_XOR:
    case Token::SAR:
    case Token::SHL:
    case Token::SHR:
      __ ret(2 * kPointerSize);
      break;
    default:
      UNREACHABLE();
  }

  // 6. For some operations emit inline code to perform floating point
  // operations on known smis (e.g., if the result of the operation
  // overflowed the smi range).
  if (allow_heapnumber_results == NO_HEAPNUMBER_RESULTS) {
    __ bind(&use_fp_on_smis);
    switch (op_) {
      // Undo the effects of some operations, and some register moves.
      case Token::SHL:
        // The arguments are saved on the stack, and only used from there.
        break;
      case Token::ADD:
        // Revert right = right + left.
        __ sub(right, Operand(left));
        break;
      case Token::SUB:
        // Revert left = left - right.
        __ add(left, Operand(right));
        break;
      case Token::MUL:
        // Right was clobbered but a copy is in ebx.
        __ mov(right, ebx);
        break;
      case Token::DIV:
        // Left was clobbered but a copy is in edi. Right is in ebx for
        // division. They should be in eax, ebx for jump to not_smi.
        __ mov(eax, edi);
        break;
      default:
        // No other operators jump to use_fp_on_smis.
        break;
    }
    __ jmp(&not_smis);
  } else {
    ASSERT(allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS);
    switch (op_) {
      case Token::SHL:
      case Token::SHR: {
        Comment perform_float(masm, "-- Perform float operation on smis");
        __ bind(&use_fp_on_smis);
        // Result we want is in left == edx, so we can put the allocated heap
        // number in eax.
        __ AllocateHeapNumber(eax, ecx, ebx, slow);
        // Store the result in the HeapNumber and return.
        // It's OK to overwrite the arguments on the stack because we
        // are about to return.
        if (op_ == Token::SHR) {
          __ mov(Operand(esp, 1 * kPointerSize), left);
          __ mov(Operand(esp, 2 * kPointerSize), Immediate(0));
          __ fild_d(Operand(esp, 1 * kPointerSize));
          __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
        } else {
          ASSERT_EQ(Token::SHL, op_);
          if (CpuFeatures::IsSupported(SSE2)) {
            CpuFeatures::Scope use_sse2(SSE2);
            __ cvtsi2sd(xmm0, Operand(left));
            __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
          } else {
            __ mov(Operand(esp, 1 * kPointerSize), left);
            __ fild_s(Operand(esp, 1 * kPointerSize));
            __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
          }
        }
        __ ret(2 * kPointerSize);
        break;
      }

      case Token::ADD:
      case Token::SUB:
      case Token::MUL:
      case Token::DIV: {
        Comment perform_float(masm, "-- Perform float operation on smis");
        __ bind(&use_fp_on_smis);
        // Restore arguments to edx, eax.
        switch (op_) {
          case Token::ADD:
            // Revert right = right + left.
            __ sub(right, Operand(left));
            break;
          case Token::SUB:
            // Revert left = left - right.
            __ add(left, Operand(right));
            break;
          case Token::MUL:
            // Right was clobbered but a copy is in ebx.
            __ mov(right, ebx);
            break;
          case Token::DIV:
            // Left was clobbered but a copy is in edi. Right is in ebx for
            // division.
            __ mov(edx, edi);
            __ mov(eax, right);
            break;
          default: UNREACHABLE();
            break;
        }
        __ AllocateHeapNumber(ecx, ebx, no_reg, slow);
        if (CpuFeatures::IsSupported(SSE2)) {
          CpuFeatures::Scope use_sse2(SSE2);
          FloatingPointHelper::LoadSSE2Smis(masm, ebx);
          switch (op_) {
            case Token::ADD: __ addsd(xmm0, xmm1); break;
            case Token::SUB: __ subsd(xmm0, xmm1); break;
            case Token::MUL: __ mulsd(xmm0, xmm1); break;
            case Token::DIV: __ divsd(xmm0, xmm1); break;
            default: UNREACHABLE();
          }
          __ movdbl(FieldOperand(ecx, HeapNumber::kValueOffset), xmm0);
        } else {  // SSE2 not available, use FPU.
          FloatingPointHelper::LoadFloatSmis(masm, ebx);
          switch (op_) {
            case Token::ADD: __ faddp(1); break;
            case Token::SUB: __ fsubp(1); break;
            case Token::MUL: __ fmulp(1); break;
            case Token::DIV: __ fdivp(1); break;
            default: UNREACHABLE();
          }
          __ fstp_d(FieldOperand(ecx, HeapNumber::kValueOffset));
        }
        __ mov(eax, ecx);
        __ ret(0);
        break;
      }

      default:
        break;
    }
  }

  // 7. Non-smi operands, fall out to the non-smi code with the operands in
  // edx and eax.
  Comment done_comment(masm, "-- Enter non-smi code");
  __ bind(&not_smis);
  switch (op_) {
    case Token::BIT_OR:
    case Token::SHL:
    case Token::SAR:
    case Token::SHR:
      // Right operand is saved in ecx and eax was destroyed by the smi
      // check.
      __ mov(eax, ecx);
      break;

    case Token::DIV:
    case Token::MOD:
      // Operands are in eax, ebx at this point.
      __ mov(edx, eax);
      __ mov(eax, ebx);
      break;

    default:
      break;
  }
}


Ben Murdoch257744e2011-11-30 15:57:28 +00001372void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001373 Label call_runtime;
1374
1375 switch (op_) {
1376 case Token::ADD:
1377 case Token::SUB:
1378 case Token::MUL:
1379 case Token::DIV:
1380 break;
1381 case Token::MOD:
1382 case Token::BIT_OR:
1383 case Token::BIT_AND:
1384 case Token::BIT_XOR:
1385 case Token::SAR:
1386 case Token::SHL:
1387 case Token::SHR:
1388 GenerateRegisterArgsPush(masm);
1389 break;
1390 default:
1391 UNREACHABLE();
1392 }
1393
Ben Murdoch257744e2011-11-30 15:57:28 +00001394 if (result_type_ == BinaryOpIC::UNINITIALIZED ||
1395 result_type_ == BinaryOpIC::SMI) {
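    // Only smi results have been seen so far, so avoid allocating heap
    // numbers here; an overflow then falls through to the type transition
    // handling below.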
Ben Murdochb0fe1622011-05-05 13:52:32 +01001396 GenerateSmiCode(masm, &call_runtime, NO_HEAPNUMBER_RESULTS);
1397 } else {
1398 GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);
1399 }
1400 __ bind(&call_runtime);
1401 switch (op_) {
1402 case Token::ADD:
1403 case Token::SUB:
1404 case Token::MUL:
1405 case Token::DIV:
1406 GenerateTypeTransition(masm);
1407 break;
1408 case Token::MOD:
1409 case Token::BIT_OR:
1410 case Token::BIT_AND:
1411 case Token::BIT_XOR:
1412 case Token::SAR:
1413 case Token::SHL:
1414 case Token::SHR:
1415 GenerateTypeTransitionWithSavedArgs(masm);
1416 break;
1417 default:
1418 UNREACHABLE();
1419 }
1420}
1421
1422
Ben Murdoch257744e2011-11-30 15:57:28 +00001423void BinaryOpStub::GenerateStringStub(MacroAssembler* masm) {
1424 ASSERT(operands_type_ == BinaryOpIC::STRING);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001425 ASSERT(op_ == Token::ADD);
Steve Block1e0659c2011-05-24 12:43:12 +01001426 // Try to add the arguments as strings; otherwise, transition to the generic
Ben Murdoch257744e2011-11-30 15:57:28 +00001427 // BinaryOpIC type.
Steve Block1e0659c2011-05-24 12:43:12 +01001428 GenerateAddStrings(masm);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001429 GenerateTypeTransition(masm);
1430}
1431
1432
Ben Murdoch257744e2011-11-30 15:57:28 +00001433void BinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001434 Label call_runtime;
Ben Murdoch257744e2011-11-30 15:57:28 +00001435 ASSERT(operands_type_ == BinaryOpIC::BOTH_STRING);
1436 ASSERT(op_ == Token::ADD);
1437 // If both arguments are strings, call the string add stub.
1438 // Otherwise, do a transition.
1439
1440 // Registers containing left and right operands respectively.
1441 Register left = edx;
1442 Register right = eax;
1443
1444 // Test if left operand is a string.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001445 __ JumpIfSmi(left, &call_runtime, Label::kNear);
Ben Murdoch257744e2011-11-30 15:57:28 +00001446 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, ecx);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001447 __ j(above_equal, &call_runtime, Label::kNear);
Ben Murdoch257744e2011-11-30 15:57:28 +00001448
1449 // Test if right operand is a string.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001450 __ JumpIfSmi(right, &call_runtime, Label::kNear);
Ben Murdoch257744e2011-11-30 15:57:28 +00001451 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, ecx);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001452 __ j(above_equal, &call_runtime, Label::kNear);
Ben Murdoch257744e2011-11-30 15:57:28 +00001453
1454 StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
1455 GenerateRegisterArgsPush(masm);
1456 __ TailCallStub(&string_add_stub);
1457
1458 __ bind(&call_runtime);
1459 GenerateTypeTransition(masm);
1460}
1461
1462
1463void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
1464 Label call_runtime;
1465 ASSERT(operands_type_ == BinaryOpIC::INT32);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001466
1467 // Floating point case.
1468 switch (op_) {
1469 case Token::ADD:
1470 case Token::SUB:
1471 case Token::MUL:
1472 case Token::DIV: {
1473 Label not_floats;
1474 Label not_int32;
Ben Murdoch8b112d22011-06-08 16:22:53 +01001475 if (CpuFeatures::IsSupported(SSE2)) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001476 CpuFeatures::Scope use_sse2(SSE2);
1477 FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
1478 FloatingPointHelper::CheckSSE2OperandsAreInt32(masm, &not_int32, ecx);
1479 switch (op_) {
1480 case Token::ADD: __ addsd(xmm0, xmm1); break;
1481 case Token::SUB: __ subsd(xmm0, xmm1); break;
1482 case Token::MUL: __ mulsd(xmm0, xmm1); break;
1483 case Token::DIV: __ divsd(xmm0, xmm1); break;
1484 default: UNREACHABLE();
1485 }
1486 // If the result type is still Int32 (or smaller), check that the value really is an int32.
Ben Murdoch257744e2011-11-30 15:57:28 +00001487 if (result_type_ <= BinaryOpIC::INT32) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001488 __ cvttsd2si(ecx, Operand(xmm0));
Ben Murdoch85b71792012-04-11 18:30:58 +01001489 __ cvtsi2sd(xmm2, Operand(ecx));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001490 __ ucomisd(xmm0, xmm2);
1491 __ j(not_zero, &not_int32);
1492 __ j(carry, &not_int32);
1493 }
1494 GenerateHeapResultAllocation(masm, &call_runtime);
1495 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
1496 __ ret(0);
1497 } else { // SSE2 not available, use FPU.
1498 FloatingPointHelper::CheckFloatOperands(masm, &not_floats, ebx);
1499 FloatingPointHelper::LoadFloatOperands(
1500 masm,
1501 ecx,
1502 FloatingPointHelper::ARGS_IN_REGISTERS);
1503 FloatingPointHelper::CheckFloatOperandsAreInt32(masm, &not_int32);
1504 switch (op_) {
1505 case Token::ADD: __ faddp(1); break;
1506 case Token::SUB: __ fsubp(1); break;
1507 case Token::MUL: __ fmulp(1); break;
1508 case Token::DIV: __ fdivp(1); break;
1509 default: UNREACHABLE();
1510 }
1511 Label after_alloc_failure;
1512 GenerateHeapResultAllocation(masm, &after_alloc_failure);
1513 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
1514 __ ret(0);
1515 __ bind(&after_alloc_failure);
1516 __ ffree();
1517 __ jmp(&call_runtime);
1518 }
1519
1520 __ bind(&not_floats);
1521 __ bind(&not_int32);
1522 GenerateTypeTransition(masm);
1523 break;
1524 }
1525
1526 case Token::MOD: {
1527 // For MOD we go directly to runtime in the non-smi case.
1528 break;
1529 }
1530 case Token::BIT_OR:
1531 case Token::BIT_AND:
1532 case Token::BIT_XOR:
1533 case Token::SAR:
1534 case Token::SHL:
1535 case Token::SHR: {
1536 GenerateRegisterArgsPush(masm);
1537 Label not_floats;
1538 Label not_int32;
1539 Label non_smi_result;
1540 /* {
1541 CpuFeatures::Scope use_sse2(SSE2);
1542 FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
1543 FloatingPointHelper::CheckSSE2OperandsAreInt32(masm, &not_int32, ecx);
1544 }*/
1545 FloatingPointHelper::LoadUnknownsAsIntegers(masm,
1546 use_sse3_,
1547 &not_floats);
1548 FloatingPointHelper::CheckLoadedIntegersWereInt32(masm, use_sse3_,
1549 &not_int32);
1550 switch (op_) {
Ben Murdoch85b71792012-04-11 18:30:58 +01001551 case Token::BIT_OR: __ or_(eax, Operand(ecx)); break;
1552 case Token::BIT_AND: __ and_(eax, Operand(ecx)); break;
1553 case Token::BIT_XOR: __ xor_(eax, Operand(ecx)); break;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001554 case Token::SAR: __ sar_cl(eax); break;
1555 case Token::SHL: __ shl_cl(eax); break;
1556 case Token::SHR: __ shr_cl(eax); break;
1557 default: UNREACHABLE();
1558 }
1559 if (op_ == Token::SHR) {
1560 // Check if result is non-negative and fits in a smi.
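          // (The unsigned result is a valid smi only when its top two bits
          // are clear, i.e. it is below 2^30.)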
1561 __ test(eax, Immediate(0xc0000000));
1562 __ j(not_zero, &call_runtime);
1563 } else {
1564 // Check if result fits in a smi.
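          // (A smi holds a signed 31-bit value; eax - 0xc0000000 has its
          // sign bit clear exactly when eax is in [-2^30, 2^30 - 1].)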
1565 __ cmp(eax, 0xc0000000);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001566 __ j(negative, &non_smi_result, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001567 }
1568 // Tag smi result and return.
1569 __ SmiTag(eax);
1570 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack.
1571
1572 // All ops except SHR return a signed int32 that we load in
1573 // a HeapNumber.
1574 if (op_ != Token::SHR) {
1575 __ bind(&non_smi_result);
1576 // Allocate a heap number if needed.
Ben Murdoch85b71792012-04-11 18:30:58 +01001577 __ mov(ebx, Operand(eax)); // ebx: result
Ben Murdoch257744e2011-11-30 15:57:28 +00001578 Label skip_allocation;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001579 switch (mode_) {
1580 case OVERWRITE_LEFT:
1581 case OVERWRITE_RIGHT:
1582 // If the operand was an object, we skip the
1583 // allocation of a heap number.
1584 __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
1585 1 * kPointerSize : 2 * kPointerSize));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001586 __ JumpIfNotSmi(eax, &skip_allocation, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001587 // Fall through!
1588 case NO_OVERWRITE:
1589 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
1590 __ bind(&skip_allocation);
1591 break;
1592 default: UNREACHABLE();
1593 }
1594 // Store the result in the HeapNumber and return.
Ben Murdoch8b112d22011-06-08 16:22:53 +01001595 if (CpuFeatures::IsSupported(SSE2)) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001596 CpuFeatures::Scope use_sse2(SSE2);
Ben Murdoch85b71792012-04-11 18:30:58 +01001597 __ cvtsi2sd(xmm0, Operand(ebx));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001598 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
1599 } else {
1600 __ mov(Operand(esp, 1 * kPointerSize), ebx);
1601 __ fild_s(Operand(esp, 1 * kPointerSize));
1602 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
1603 }
1604 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack.
1605 }
1606
1607 __ bind(&not_floats);
1608 __ bind(&not_int32);
1609 GenerateTypeTransitionWithSavedArgs(masm);
1610 break;
1611 }
1612 default: UNREACHABLE(); break;
1613 }
1614
1615 // If an allocation fails, or SHR or MOD hit a hard case,
1616 // use the runtime system to get the correct result.
1617 __ bind(&call_runtime);
1618
1619 switch (op_) {
1620 case Token::ADD:
1621 GenerateRegisterArgsPush(masm);
1622 __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
1623 break;
1624 case Token::SUB:
1625 GenerateRegisterArgsPush(masm);
1626 __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
1627 break;
1628 case Token::MUL:
1629 GenerateRegisterArgsPush(masm);
1630 __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
1631 break;
1632 case Token::DIV:
1633 GenerateRegisterArgsPush(masm);
1634 __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
1635 break;
1636 case Token::MOD:
1637 GenerateRegisterArgsPush(masm);
1638 __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
1639 break;
1640 case Token::BIT_OR:
1641 __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
1642 break;
1643 case Token::BIT_AND:
1644 __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
1645 break;
1646 case Token::BIT_XOR:
1647 __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
1648 break;
1649 case Token::SAR:
1650 __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
1651 break;
1652 case Token::SHL:
1653 __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
1654 break;
1655 case Token::SHR:
1656 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
1657 break;
1658 default:
1659 UNREACHABLE();
1660 }
1661}
1662
1663
Ben Murdoch257744e2011-11-30 15:57:28 +00001664void BinaryOpStub::GenerateOddballStub(MacroAssembler* masm) {
Steve Block44f0eee2011-05-26 01:26:41 +01001665 if (op_ == Token::ADD) {
1666 // Handle string addition here, because it is the only operation
1667 // that does not do a ToNumber conversion on the operands.
1668 GenerateAddStrings(masm);
1669 }
1670
Ben Murdoch257744e2011-11-30 15:57:28 +00001671 Factory* factory = masm->isolate()->factory();
1672
Steve Block44f0eee2011-05-26 01:26:41 +01001673 // Convert oddball arguments to numbers.
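 // For the bitwise operators undefined converts (via ToInt32) to 0; for
 // the arithmetic operators it converts to NaN.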
Ben Murdoch257744e2011-11-30 15:57:28 +00001674 Label check, done;
1675 __ cmp(edx, factory->undefined_value());
1676 __ j(not_equal, &check, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01001677 if (Token::IsBitOp(op_)) {
Ben Murdoch85b71792012-04-11 18:30:58 +01001678 __ xor_(edx, Operand(edx));
Steve Block44f0eee2011-05-26 01:26:41 +01001679 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00001680 __ mov(edx, Immediate(factory->nan_value()));
Steve Block44f0eee2011-05-26 01:26:41 +01001681 }
Ben Murdoch257744e2011-11-30 15:57:28 +00001682 __ jmp(&done, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01001683 __ bind(&check);
Ben Murdoch257744e2011-11-30 15:57:28 +00001684 __ cmp(eax, factory->undefined_value());
1685 __ j(not_equal, &done, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01001686 if (Token::IsBitOp(op_)) {
Ben Murdoch85b71792012-04-11 18:30:58 +01001687 __ xor_(eax, Operand(eax));
Steve Block44f0eee2011-05-26 01:26:41 +01001688 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00001689 __ mov(eax, Immediate(factory->nan_value()));
Steve Block44f0eee2011-05-26 01:26:41 +01001690 }
1691 __ bind(&done);
1692
1693 GenerateHeapNumberStub(masm);
1694}
1695
1696
Ben Murdoch257744e2011-11-30 15:57:28 +00001697void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001698 Label call_runtime;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001699
1700 // Floating point case.
1701 switch (op_) {
1702 case Token::ADD:
1703 case Token::SUB:
1704 case Token::MUL:
1705 case Token::DIV: {
1706 Label not_floats;
Ben Murdoch8b112d22011-06-08 16:22:53 +01001707 if (CpuFeatures::IsSupported(SSE2)) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001708 CpuFeatures::Scope use_sse2(SSE2);
1709 FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
1710
1711 switch (op_) {
1712 case Token::ADD: __ addsd(xmm0, xmm1); break;
1713 case Token::SUB: __ subsd(xmm0, xmm1); break;
1714 case Token::MUL: __ mulsd(xmm0, xmm1); break;
1715 case Token::DIV: __ divsd(xmm0, xmm1); break;
1716 default: UNREACHABLE();
1717 }
1718 GenerateHeapResultAllocation(masm, &call_runtime);
1719 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
1720 __ ret(0);
1721 } else { // SSE2 not available, use FPU.
1722 FloatingPointHelper::CheckFloatOperands(masm, &not_floats, ebx);
1723 FloatingPointHelper::LoadFloatOperands(
1724 masm,
1725 ecx,
1726 FloatingPointHelper::ARGS_IN_REGISTERS);
1727 switch (op_) {
1728 case Token::ADD: __ faddp(1); break;
1729 case Token::SUB: __ fsubp(1); break;
1730 case Token::MUL: __ fmulp(1); break;
1731 case Token::DIV: __ fdivp(1); break;
1732 default: UNREACHABLE();
1733 }
1734 Label after_alloc_failure;
1735 GenerateHeapResultAllocation(masm, &after_alloc_failure);
1736 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
1737 __ ret(0);
1738 __ bind(&after_alloc_failure);
1739 __ ffree();
1740 __ jmp(&call_runtime);
1741 }
1742
1743 __ bind(&not_floats);
1744 GenerateTypeTransition(masm);
1745 break;
1746 }
1747
1748 case Token::MOD: {
1749 // For MOD we go directly to runtime in the non-smi case.
1750 break;
1751 }
1752 case Token::BIT_OR:
1753 case Token::BIT_AND:
1754 case Token::BIT_XOR:
1755 case Token::SAR:
1756 case Token::SHL:
1757 case Token::SHR: {
1758 GenerateRegisterArgsPush(masm);
1759 Label not_floats;
1760 Label non_smi_result;
1761 FloatingPointHelper::LoadUnknownsAsIntegers(masm,
1762 use_sse3_,
1763 &not_floats);
1764 switch (op_) {
Ben Murdoch85b71792012-04-11 18:30:58 +01001765 case Token::BIT_OR: __ or_(eax, Operand(ecx)); break;
1766 case Token::BIT_AND: __ and_(eax, Operand(ecx)); break;
1767 case Token::BIT_XOR: __ xor_(eax, Operand(ecx)); break;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001768 case Token::SAR: __ sar_cl(eax); break;
1769 case Token::SHL: __ shl_cl(eax); break;
1770 case Token::SHR: __ shr_cl(eax); break;
1771 default: UNREACHABLE();
1772 }
1773 if (op_ == Token::SHR) {
1774 // Check if result is non-negative and fits in a smi.
1775 __ test(eax, Immediate(0xc0000000));
1776 __ j(not_zero, &call_runtime);
1777 } else {
1778 // Check if result fits in a smi.
1779 __ cmp(eax, 0xc0000000);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001780 __ j(negative, &non_smi_result, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001781 }
1782 // Tag smi result and return.
1783 __ SmiTag(eax);
1784 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack.
1785
1786 // All ops except SHR return a signed int32 that we load in
1787 // a HeapNumber.
1788 if (op_ != Token::SHR) {
1789 __ bind(&non_smi_result);
1790 // Allocate a heap number if needed.
Ben Murdoch85b71792012-04-11 18:30:58 +01001791 __ mov(ebx, Operand(eax)); // ebx: result
Ben Murdoch257744e2011-11-30 15:57:28 +00001792 Label skip_allocation;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001793 switch (mode_) {
1794 case OVERWRITE_LEFT:
1795 case OVERWRITE_RIGHT:
1796 // If the operand was an object, we skip the
1797 // allocation of a heap number.
1798 __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
1799 1 * kPointerSize : 2 * kPointerSize));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001800 __ JumpIfNotSmi(eax, &skip_allocation, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001801 // Fall through!
1802 case NO_OVERWRITE:
1803 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
1804 __ bind(&skip_allocation);
1805 break;
1806 default: UNREACHABLE();
1807 }
1808 // Store the result in the HeapNumber and return.
Ben Murdoch8b112d22011-06-08 16:22:53 +01001809 if (CpuFeatures::IsSupported(SSE2)) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001810 CpuFeatures::Scope use_sse2(SSE2);
Ben Murdoch85b71792012-04-11 18:30:58 +01001811 __ cvtsi2sd(xmm0, Operand(ebx));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001812 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
1813 } else {
1814 __ mov(Operand(esp, 1 * kPointerSize), ebx);
1815 __ fild_s(Operand(esp, 1 * kPointerSize));
1816 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
1817 }
1818 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack.
1819 }
1820
1821 __ bind(&not_floats);
1822 GenerateTypeTransitionWithSavedArgs(masm);
1823 break;
1824 }
1825 default: UNREACHABLE(); break;
1826 }
1827
1828 // If an allocation fails, or SHR or MOD hit a hard case,
1829 // use the runtime system to get the correct result.
1830 __ bind(&call_runtime);
1831
1832 switch (op_) {
1833 case Token::ADD:
1834 GenerateRegisterArgsPush(masm);
1835 __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
1836 break;
1837 case Token::SUB:
1838 GenerateRegisterArgsPush(masm);
1839 __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
1840 break;
1841 case Token::MUL:
1842 GenerateRegisterArgsPush(masm);
1843 __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
1844 break;
1845 case Token::DIV:
1846 GenerateRegisterArgsPush(masm);
1847 __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
1848 break;
1849 case Token::MOD:
1850 GenerateRegisterArgsPush(masm);
1851 __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
1852 break;
1853 case Token::BIT_OR:
1854 __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
1855 break;
1856 case Token::BIT_AND:
1857 __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
1858 break;
1859 case Token::BIT_XOR:
1860 __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
1861 break;
1862 case Token::SAR:
1863 __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
1864 break;
1865 case Token::SHL:
1866 __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
1867 break;
1868 case Token::SHR:
1869 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
1870 break;
1871 default:
1872 UNREACHABLE();
1873 }
1874}
1875
1876
Ben Murdoch257744e2011-11-30 15:57:28 +00001877void BinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001878 Label call_runtime;
1879
Steve Block44f0eee2011-05-26 01:26:41 +01001880 Counters* counters = masm->isolate()->counters();
1881 __ IncrementCounter(counters->generic_binary_stub_calls(), 1);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001882
1883 switch (op_) {
1884 case Token::ADD:
1885 case Token::SUB:
1886 case Token::MUL:
1887 case Token::DIV:
1888 break;
1889 case Token::MOD:
1890 case Token::BIT_OR:
1891 case Token::BIT_AND:
1892 case Token::BIT_XOR:
1893 case Token::SAR:
1894 case Token::SHL:
1895 case Token::SHR:
1896 GenerateRegisterArgsPush(masm);
1897 break;
1898 default:
1899 UNREACHABLE();
1900 }
1901
1902 GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);
1903
1904 // Floating point case.
1905 switch (op_) {
1906 case Token::ADD:
1907 case Token::SUB:
1908 case Token::MUL:
1909 case Token::DIV: {
1910 Label not_floats;
Ben Murdoch8b112d22011-06-08 16:22:53 +01001911 if (CpuFeatures::IsSupported(SSE2)) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001912 CpuFeatures::Scope use_sse2(SSE2);
1913 FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
1914
1915 switch (op_) {
1916 case Token::ADD: __ addsd(xmm0, xmm1); break;
1917 case Token::SUB: __ subsd(xmm0, xmm1); break;
1918 case Token::MUL: __ mulsd(xmm0, xmm1); break;
1919 case Token::DIV: __ divsd(xmm0, xmm1); break;
1920 default: UNREACHABLE();
1921 }
1922 GenerateHeapResultAllocation(masm, &call_runtime);
1923 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
1924 __ ret(0);
1925 } else { // SSE2 not available, use FPU.
1926 FloatingPointHelper::CheckFloatOperands(masm, &not_floats, ebx);
1927 FloatingPointHelper::LoadFloatOperands(
1928 masm,
1929 ecx,
1930 FloatingPointHelper::ARGS_IN_REGISTERS);
1931 switch (op_) {
1932 case Token::ADD: __ faddp(1); break;
1933 case Token::SUB: __ fsubp(1); break;
1934 case Token::MUL: __ fmulp(1); break;
1935 case Token::DIV: __ fdivp(1); break;
1936 default: UNREACHABLE();
1937 }
1938 Label after_alloc_failure;
1939 GenerateHeapResultAllocation(masm, &after_alloc_failure);
1940 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
1941 __ ret(0);
1942 __ bind(&after_alloc_failure);
1943 __ ffree();
1944 __ jmp(&call_runtime);
1945 }
1946 __ bind(&not_floats);
1947 break;
1948 }
1949 case Token::MOD: {
1950 // For MOD we go directly to runtime in the non-smi case.
1951 break;
1952 }
1953 case Token::BIT_OR:
1954 case Token::BIT_AND:
1955 case Token::BIT_XOR:
1956 case Token::SAR:
1957 case Token::SHL:
1958 case Token::SHR: {
1959 Label non_smi_result;
1960 FloatingPointHelper::LoadUnknownsAsIntegers(masm,
1961 use_sse3_,
1962 &call_runtime);
1963 switch (op_) {
Ben Murdoch85b71792012-04-11 18:30:58 +01001964 case Token::BIT_OR: __ or_(eax, Operand(ecx)); break;
1965 case Token::BIT_AND: __ and_(eax, Operand(ecx)); break;
1966 case Token::BIT_XOR: __ xor_(eax, Operand(ecx)); break;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001967 case Token::SAR: __ sar_cl(eax); break;
1968 case Token::SHL: __ shl_cl(eax); break;
1969 case Token::SHR: __ shr_cl(eax); break;
1970 default: UNREACHABLE();
1971 }
1972 if (op_ == Token::SHR) {
1973 // Check if result is non-negative and fits in a smi.
1974 __ test(eax, Immediate(0xc0000000));
1975 __ j(not_zero, &call_runtime);
1976 } else {
1977 // Check if result fits in a smi.
1978 __ cmp(eax, 0xc0000000);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001979 __ j(negative, &non_smi_result, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001980 }
1981 // Tag smi result and return.
1982 __ SmiTag(eax);
1983 __ ret(2 * kPointerSize); // Drop the arguments from the stack.
1984
1985 // All ops except SHR return a signed int32 that we load in
1986 // a HeapNumber.
1987 if (op_ != Token::SHR) {
1988 __ bind(&non_smi_result);
1989 // Allocate a heap number if needed.
Ben Murdoch85b71792012-04-11 18:30:58 +01001990 __ mov(ebx, Operand(eax)); // ebx: result
Ben Murdoch257744e2011-11-30 15:57:28 +00001991 Label skip_allocation;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001992 switch (mode_) {
1993 case OVERWRITE_LEFT:
1994 case OVERWRITE_RIGHT:
1995 // If the operand was an object, we skip the
1996 // allocation of a heap number.
1997 __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
1998 1 * kPointerSize : 2 * kPointerSize));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001999 __ JumpIfNotSmi(eax, &skip_allocation, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002000 // Fall through!
2001 case NO_OVERWRITE:
2002 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
2003 __ bind(&skip_allocation);
2004 break;
2005 default: UNREACHABLE();
2006 }
2007 // Store the result in the HeapNumber and return.
Ben Murdoch8b112d22011-06-08 16:22:53 +01002008 if (CpuFeatures::IsSupported(SSE2)) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01002009 CpuFeatures::Scope use_sse2(SSE2);
Ben Murdoch85b71792012-04-11 18:30:58 +01002010 __ cvtsi2sd(xmm0, Operand(ebx));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002011 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
2012 } else {
2013 __ mov(Operand(esp, 1 * kPointerSize), ebx);
2014 __ fild_s(Operand(esp, 1 * kPointerSize));
2015 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
2016 }
2017 __ ret(2 * kPointerSize);
2018 }
2019 break;
2020 }
2021 default: UNREACHABLE(); break;
2022 }
2023
2024 // If all else fails, use the runtime system to get the correct
2025 // result.
2026 __ bind(&call_runtime);
2027 switch (op_) {
2028 case Token::ADD: {
Steve Block1e0659c2011-05-24 12:43:12 +01002029 GenerateAddStrings(masm);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002030 GenerateRegisterArgsPush(masm);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002031 __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
2032 break;
2033 }
2034 case Token::SUB:
2035 GenerateRegisterArgsPush(masm);
2036 __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
2037 break;
2038 case Token::MUL:
2039 GenerateRegisterArgsPush(masm);
2040 __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
2041 break;
2042 case Token::DIV:
2043 GenerateRegisterArgsPush(masm);
2044 __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
2045 break;
2046 case Token::MOD:
2047 __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
2048 break;
2049 case Token::BIT_OR:
2050 __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
2051 break;
2052 case Token::BIT_AND:
2053 __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
2054 break;
2055 case Token::BIT_XOR:
2056 __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
2057 break;
2058 case Token::SAR:
2059 __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
2060 break;
2061 case Token::SHL:
2062 __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
2063 break;
2064 case Token::SHR:
2065 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
2066 break;
2067 default:
2068 UNREACHABLE();
2069 }
2070}
2071
2072
Ben Murdoch257744e2011-11-30 15:57:28 +00002073void BinaryOpStub::GenerateAddStrings(MacroAssembler* masm) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002074 ASSERT(op_ == Token::ADD);
Ben Murdoch257744e2011-11-30 15:57:28 +00002075 Label left_not_string, call_runtime;
Steve Block1e0659c2011-05-24 12:43:12 +01002076
2077 // Registers containing left and right operands respectively.
2078 Register left = edx;
2079 Register right = eax;
2080
2081 // Test if left operand is a string.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002082 __ JumpIfSmi(left, &left_not_string, Label::kNear);
Steve Block1e0659c2011-05-24 12:43:12 +01002083 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, ecx);
Ben Murdoch257744e2011-11-30 15:57:28 +00002084 __ j(above_equal, &left_not_string, Label::kNear);
Steve Block1e0659c2011-05-24 12:43:12 +01002085
2086 StringAddStub string_add_left_stub(NO_STRING_CHECK_LEFT_IN_STUB);
2087 GenerateRegisterArgsPush(masm);
2088 __ TailCallStub(&string_add_left_stub);
2089
2090 // Left operand is not a string, test right.
2091 __ bind(&left_not_string);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002092 __ JumpIfSmi(right, &call_runtime, Label::kNear);
Steve Block1e0659c2011-05-24 12:43:12 +01002093 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, ecx);
Ben Murdoch257744e2011-11-30 15:57:28 +00002094 __ j(above_equal, &call_runtime, Label::kNear);
Steve Block1e0659c2011-05-24 12:43:12 +01002095
2096 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB);
2097 GenerateRegisterArgsPush(masm);
2098 __ TailCallStub(&string_add_right_stub);
2099
2100 // Neither argument is a string.
2101 __ bind(&call_runtime);
2102}
2103
2104
Ben Murdoch257744e2011-11-30 15:57:28 +00002105void BinaryOpStub::GenerateHeapResultAllocation(
Ben Murdochb0fe1622011-05-05 13:52:32 +01002106 MacroAssembler* masm,
2107 Label* alloc_failure) {
2108 Label skip_allocation;
2109 OverwriteMode mode = mode_;
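  // The overwrite mode records which input heap number, if any, may be
  // reused to hold the result, saving an allocation.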
2110 switch (mode) {
2111 case OVERWRITE_LEFT: {
2112 // If the argument in edx is already an object, we skip the
2113 // allocation of a heap number.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002114 __ JumpIfNotSmi(edx, &skip_allocation, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002115 // Allocate a heap number for the result. Keep eax and edx intact
2116 // for the possible runtime call.
2117 __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure);
2118 // Now edx can be overwritten, losing one of the arguments, as we are
2119 // now done and will not need it any more.
Ben Murdoch85b71792012-04-11 18:30:58 +01002120 __ mov(edx, Operand(ebx));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002121 __ bind(&skip_allocation);
2122 // Use the object in edx as the result holder.
Ben Murdoch85b71792012-04-11 18:30:58 +01002123 __ mov(eax, Operand(edx));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002124 break;
2125 }
2126 case OVERWRITE_RIGHT:
2127 // If the argument in eax is already an object, we skip the
2128 // allocation of a heap number.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002129 __ JumpIfNotSmi(eax, &skip_allocation, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002130 // Fall through!
2131 case NO_OVERWRITE:
2132 // Allocate a heap number for the result. Keep eax and edx intact
2133 // for the possible runtime call.
2134 __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure);
2135 // Now eax can be overwritten, losing one of the arguments, as we are
2136 // now done and will not need it any more.
2137 __ mov(eax, ebx);
2138 __ bind(&skip_allocation);
2139 break;
2140 default: UNREACHABLE();
2141 }
2142}
2143
2144
Ben Murdoch257744e2011-11-30 15:57:28 +00002145void BinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01002146 __ pop(ecx);
2147 __ push(edx);
2148 __ push(eax);
2149 __ push(ecx);
2150}
2151
2152
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002153void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01002154 // TAGGED case:
2155 // Input:
2156 // esp[4]: tagged number input argument (should be number).
2157 // esp[0]: return address.
2158 // Output:
2159 // eax: tagged double result.
2160 // UNTAGGED case:
2161 // Input:
2162 // esp[0]: return address.
2163 // xmm1: untagged double input argument
2164 // Output:
2165 // xmm1: untagged double result.
2166
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002167 Label runtime_call;
2168 Label runtime_call_clear_stack;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002169 Label skip_cache;
2170 const bool tagged = (argument_type_ == TAGGED);
2171 if (tagged) {
2172 // Test that eax is a number.
Ben Murdoch257744e2011-11-30 15:57:28 +00002173 Label input_not_smi;
2174 Label loaded;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002175 __ mov(eax, Operand(esp, kPointerSize));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002176 __ JumpIfNotSmi(eax, &input_not_smi, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002177 // Input is a smi. Untag and load it onto the FPU stack.
2178 // Then load the low and high words of the double into ebx, edx.
2179 STATIC_ASSERT(kSmiTagSize == 1);
2180 __ sar(eax, 1);
Ben Murdoch85b71792012-04-11 18:30:58 +01002181 __ sub(Operand(esp), Immediate(2 * kPointerSize));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002182 __ mov(Operand(esp, 0), eax);
2183 __ fild_s(Operand(esp, 0));
2184 __ fst_d(Operand(esp, 0));
2185 __ pop(edx);
2186 __ pop(ebx);
Ben Murdoch257744e2011-11-30 15:57:28 +00002187 __ jmp(&loaded, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002188 __ bind(&input_not_smi);
2189 // Check if input is a HeapNumber.
2190 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
Steve Block44f0eee2011-05-26 01:26:41 +01002191 Factory* factory = masm->isolate()->factory();
Ben Murdoch85b71792012-04-11 18:30:58 +01002192 __ cmp(Operand(ebx), Immediate(factory->heap_number_map()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002193 __ j(not_equal, &runtime_call);
2194 // Input is a HeapNumber. Push it on the FPU stack and load its
2195 // low and high words into ebx, edx.
2196 __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
2197 __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset));
2198 __ mov(ebx, FieldOperand(eax, HeapNumber::kMantissaOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002199
Ben Murdochb0fe1622011-05-05 13:52:32 +01002200 __ bind(&loaded);
2201 } else { // UNTAGGED.
Ben Murdoch8b112d22011-06-08 16:22:53 +01002202 if (CpuFeatures::IsSupported(SSE4_1)) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01002203 CpuFeatures::Scope sse4_scope(SSE4_1);
Ben Murdoch85b71792012-04-11 18:30:58 +01002204 __ pextrd(Operand(edx), xmm1, 0x1); // copy xmm1[63..32] to edx.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002205 } else {
2206 __ pshufd(xmm0, xmm1, 0x1);
Ben Murdoch85b71792012-04-11 18:30:58 +01002207 __ movd(Operand(edx), xmm0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002208 }
Ben Murdoch85b71792012-04-11 18:30:58 +01002209 __ movd(Operand(ebx), xmm1);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002210 }
2211
2212 // ST[0] or xmm1 == double value
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002213 // ebx = low 32 bits of double value
2214 // edx = high 32 bits of double value
2215 // Compute hash (the shifts are arithmetic):
2216 // h = (low ^ high); h ^= h >> 16; h ^= h >> 8; h = h & (cacheSize - 1);
2217 __ mov(ecx, ebx);
Ben Murdoch85b71792012-04-11 18:30:58 +01002218 __ xor_(ecx, Operand(edx));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002219 __ mov(eax, ecx);
2220 __ sar(eax, 16);
Ben Murdoch85b71792012-04-11 18:30:58 +01002221 __ xor_(ecx, Operand(eax));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002222 __ mov(eax, ecx);
2223 __ sar(eax, 8);
Ben Murdoch85b71792012-04-11 18:30:58 +01002224 __ xor_(ecx, Operand(eax));
Steve Block44f0eee2011-05-26 01:26:41 +01002225 ASSERT(IsPowerOf2(TranscendentalCache::SubCache::kCacheSize));
Ben Murdoch85b71792012-04-11 18:30:58 +01002226 __ and_(Operand(ecx),
Steve Block44f0eee2011-05-26 01:26:41 +01002227 Immediate(TranscendentalCache::SubCache::kCacheSize - 1));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002228
Ben Murdochb0fe1622011-05-05 13:52:32 +01002229 // ST[0] or xmm1 == double value.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002230 // ebx = low 32 bits of double value.
2231 // edx = high 32 bits of double value.
2232 // ecx = TranscendentalCache::hash(double value).
Steve Block44f0eee2011-05-26 01:26:41 +01002233 ExternalReference cache_array =
2234 ExternalReference::transcendental_cache_array_address(masm->isolate());
2235 __ mov(eax, Immediate(cache_array));
2236 int cache_array_index =
2237 type_ * sizeof(masm->isolate()->transcendental_cache()->caches_[0]);
2238 __ mov(eax, Operand(eax, cache_array_index));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002239 // Eax points to the cache for the type type_.
2240 // If NULL, the cache hasn't been initialized yet, so go through runtime.
Ben Murdoch85b71792012-04-11 18:30:58 +01002241 __ test(eax, Operand(eax));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002242 __ j(zero, &runtime_call_clear_stack);
2243#ifdef DEBUG
2244 // Check that the layout of cache elements match expectations.
Steve Block44f0eee2011-05-26 01:26:41 +01002245 { TranscendentalCache::SubCache::Element test_elem[2];
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002246 char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
2247 char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
2248 char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
2249 char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
2250 char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
2251 CHECK_EQ(12, elem2_start - elem_start); // Two uint_32's and a pointer.
2252 CHECK_EQ(0, elem_in0 - elem_start);
2253 CHECK_EQ(kIntSize, elem_in1 - elem_start);
2254 CHECK_EQ(2 * kIntSize, elem_out - elem_start);
2255 }
2256#endif
2257 // Find the address of the ecx'th entry in the cache, i.e., &eax[ecx*12].
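  // (The first lea scales ecx by 3 and the second by 4, giving the
  // 12-byte element stride.)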
2258 __ lea(ecx, Operand(ecx, ecx, times_2, 0));
2259 __ lea(ecx, Operand(eax, ecx, times_4, 0));
2260 // Check if cache matches: Double value is stored in uint32_t[2] array.
Ben Murdoch257744e2011-11-30 15:57:28 +00002261 Label cache_miss;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002262 __ cmp(ebx, Operand(ecx, 0));
Ben Murdoch257744e2011-11-30 15:57:28 +00002263 __ j(not_equal, &cache_miss, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002264 __ cmp(edx, Operand(ecx, kIntSize));
Ben Murdoch257744e2011-11-30 15:57:28 +00002265 __ j(not_equal, &cache_miss, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002266 // Cache hit!
2267 __ mov(eax, Operand(ecx, 2 * kIntSize));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002268 if (tagged) {
2269 __ fstp(0);
2270 __ ret(kPointerSize);
2271 } else { // UNTAGGED.
2272 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
2273 __ Ret();
2274 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002275
2276 __ bind(&cache_miss);
2277 // Update cache with new value.
2278 // We are short on registers, so use no_reg as scratch.
2279 // This gives slightly larger code.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002280 if (tagged) {
2281 __ AllocateHeapNumber(eax, edi, no_reg, &runtime_call_clear_stack);
2282 } else { // UNTAGGED.
2283 __ AllocateHeapNumber(eax, edi, no_reg, &skip_cache);
Ben Murdoch85b71792012-04-11 18:30:58 +01002284 __ sub(Operand(esp), Immediate(kDoubleSize));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002285 __ movdbl(Operand(esp, 0), xmm1);
2286 __ fld_d(Operand(esp, 0));
Ben Murdoch85b71792012-04-11 18:30:58 +01002287 __ add(Operand(esp), Immediate(kDoubleSize));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002288 }
Ben Murdoch85b71792012-04-11 18:30:58 +01002289 GenerateOperation(masm);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002290 __ mov(Operand(ecx, 0), ebx);
2291 __ mov(Operand(ecx, kIntSize), edx);
2292 __ mov(Operand(ecx, 2 * kIntSize), eax);
2293 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002294 if (tagged) {
2295 __ ret(kPointerSize);
2296 } else { // UNTAGGED.
2297 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
2298 __ Ret();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002299
Ben Murdochb0fe1622011-05-05 13:52:32 +01002300 // Skip cache and return answer directly, only in untagged case.
2301 __ bind(&skip_cache);
Ben Murdoch85b71792012-04-11 18:30:58 +01002302 __ sub(Operand(esp), Immediate(kDoubleSize));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002303 __ movdbl(Operand(esp, 0), xmm1);
2304 __ fld_d(Operand(esp, 0));
Ben Murdoch85b71792012-04-11 18:30:58 +01002305 GenerateOperation(masm);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002306 __ fstp_d(Operand(esp, 0));
2307 __ movdbl(xmm1, Operand(esp, 0));
Ben Murdoch85b71792012-04-11 18:30:58 +01002308 __ add(Operand(esp), Immediate(kDoubleSize));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002309 // We return the value in xmm1 without adding it to the cache, but
2310 // we cause a scavenging GC so that future allocations will succeed.
Ben Murdoch85b71792012-04-11 18:30:58 +01002311 __ EnterInternalFrame();
2312 // Allocate an unused object bigger than a HeapNumber.
2313 __ push(Immediate(Smi::FromInt(2 * kDoubleSize)));
2314 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
2315 __ LeaveInternalFrame();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002316 __ Ret();
2317 }
2318
2319 // Call runtime, doing whatever allocation and cleanup is necessary.
2320 if (tagged) {
2321 __ bind(&runtime_call_clear_stack);
2322 __ fstp(0);
2323 __ bind(&runtime_call);
Steve Block44f0eee2011-05-26 01:26:41 +01002324 ExternalReference runtime =
2325 ExternalReference(RuntimeFunction(), masm->isolate());
2326 __ TailCallExternalReference(runtime, 1, 1);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002327 } else { // UNTAGGED.
2328 __ bind(&runtime_call_clear_stack);
2329 __ bind(&runtime_call);
2330 __ AllocateHeapNumber(eax, edi, no_reg, &skip_cache);
2331 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm1);
Ben Murdoch85b71792012-04-11 18:30:58 +01002332 __ EnterInternalFrame();
2333 __ push(eax);
2334 __ CallRuntime(RuntimeFunction(), 1);
2335 __ LeaveInternalFrame();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002336 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
2337 __ Ret();
2338 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002339}
2340
2341
2342Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
2343 switch (type_) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002344 case TranscendentalCache::SIN: return Runtime::kMath_sin;
2345 case TranscendentalCache::COS: return Runtime::kMath_cos;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002346 case TranscendentalCache::LOG: return Runtime::kMath_log;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002347 default:
2348 UNIMPLEMENTED();
2349 return Runtime::kAbort;
2350 }
2351}
2352
2353
Ben Murdoch85b71792012-04-11 18:30:58 +01002354void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002355 // Only free register is edi.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002356 // Input value is on FP stack, and also in ebx/edx.
2357 // Input value is possibly in xmm1.
2358 // Address of result (a newly allocated HeapNumber) may be in eax.
Ben Murdoch85b71792012-04-11 18:30:58 +01002359 if (type_ == TranscendentalCache::SIN || type_ == TranscendentalCache::COS) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01002360 // Both fsin and fcos require arguments in the range +/-2^63 and
2361 // return NaN for infinities and NaN. They can share all code except
2362 // the actual fsin/fcos operation.
Ben Murdoch257744e2011-11-30 15:57:28 +00002363 Label in_range, done;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002364 // If argument is outside the range -2^63..2^63, fsin/cos doesn't
2365 // work. We must reduce it to the appropriate range.
2366 __ mov(edi, edx);
Ben Murdoch85b71792012-04-11 18:30:58 +01002367 __ and_(Operand(edi), Immediate(0x7ff00000)); // Exponent only.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002368 int supported_exponent_limit =
2369 (63 + HeapNumber::kExponentBias) << HeapNumber::kExponentShift;
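    // edi holds only the biased exponent bits of the high word; an exponent
    // of 63 or more means |input| >= 2^63, which fsin/fcos cannot handle.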
Ben Murdoch85b71792012-04-11 18:30:58 +01002370 __ cmp(Operand(edi), Immediate(supported_exponent_limit));
Ben Murdoch257744e2011-11-30 15:57:28 +00002371 __ j(below, &in_range, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002372 // Check for infinity and NaN. Both return NaN for sin.
Ben Murdoch85b71792012-04-11 18:30:58 +01002373 __ cmp(Operand(edi), Immediate(0x7ff00000));
Ben Murdoch257744e2011-11-30 15:57:28 +00002374 Label non_nan_result;
2375 __ j(not_equal, &non_nan_result, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002376 // Input is +/-Infinity or NaN. Result is NaN.
2377 __ fstp(0);
2378 // NaN is represented by 0x7ff8000000000000.
2379 __ push(Immediate(0x7ff80000));
2380 __ push(Immediate(0));
2381 __ fld_d(Operand(esp, 0));
Ben Murdoch85b71792012-04-11 18:30:58 +01002382 __ add(Operand(esp), Immediate(2 * kPointerSize));
Ben Murdoch257744e2011-11-30 15:57:28 +00002383 __ jmp(&done, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002384
Ben Murdochb0fe1622011-05-05 13:52:32 +01002385 __ bind(&non_nan_result);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002386
Ben Murdochb0fe1622011-05-05 13:52:32 +01002387 // Use fprem1 to restrict the argument to the range +/-2*PI.
2388 __ mov(edi, eax); // Save eax before using fnstsw_ax.
2389 __ fldpi();
2390 __ fadd(0);
2391 __ fld(1);
2392 // FPU Stack: input, 2*pi, input.
2393 {
Ben Murdoch257744e2011-11-30 15:57:28 +00002394 Label no_exceptions;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002395 __ fwait();
2396 __ fnstsw_ax();
2397 // Clear if Illegal Operand or Zero Division exceptions are set.
Ben Murdoch85b71792012-04-11 18:30:58 +01002398 __ test(Operand(eax), Immediate(5));
Ben Murdoch257744e2011-11-30 15:57:28 +00002399 __ j(zero, &no_exceptions, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002400 __ fnclex();
2401 __ bind(&no_exceptions);
2402 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002403
Ben Murdochb0fe1622011-05-05 13:52:32 +01002404 // Compute st(0) % st(1)
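    // fprem1 reduces the exponent difference by a bounded amount per
    // iteration, so the reduction below may need several rounds.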
2405 {
Ben Murdoch257744e2011-11-30 15:57:28 +00002406 Label partial_remainder_loop;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002407 __ bind(&partial_remainder_loop);
2408 __ fprem1();
2409 __ fwait();
2410 __ fnstsw_ax();
Ben Murdoch85b71792012-04-11 18:30:58 +01002411 __ test(Operand(eax), Immediate(0x400 /* C2 */));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002412 // If C2 is set, computation only has partial result. Loop to
2413 // continue computation.
2414 __ j(not_zero, &partial_remainder_loop);
2415 }
2416 // FPU Stack: input, 2*pi, input % 2*pi
2417 __ fstp(2);
2418 __ fstp(0);
2419 __ mov(eax, edi); // Restore eax (allocated HeapNumber pointer).
2420
2421 // FPU Stack: input % 2*pi
2422 __ bind(&in_range);
Ben Murdoch85b71792012-04-11 18:30:58 +01002423 switch (type_) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01002424 case TranscendentalCache::SIN:
2425 __ fsin();
2426 break;
2427 case TranscendentalCache::COS:
2428 __ fcos();
2429 break;
2430 default:
2431 UNREACHABLE();
2432 }
2433 __ bind(&done);
2434 } else {
Ben Murdoch85b71792012-04-11 18:30:58 +01002435 ASSERT(type_ == TranscendentalCache::LOG);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002436 __ fldln2();
2437 __ fxch();
2438 __ fyl2x();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002439 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002440}
2441
2442
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002443// Input: edx, eax are the left and right objects of a bit op.
2444// Output: eax, ecx are left and right integers for a bit op.
2445void FloatingPointHelper::LoadUnknownsAsIntegers(MacroAssembler* masm,
2446 bool use_sse3,
2447 Label* conversion_failure) {
2448 // Check float operands.
2449 Label arg1_is_object, check_undefined_arg1;
2450 Label arg2_is_object, check_undefined_arg2;
2451 Label load_arg2, done;
2452
2453 // Test if arg1 is a Smi.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002454 __ JumpIfNotSmi(edx, &arg1_is_object, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002455
2456 __ SmiUntag(edx);
2457 __ jmp(&load_arg2);
2458
2459 // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
2460 __ bind(&check_undefined_arg1);
Steve Block44f0eee2011-05-26 01:26:41 +01002461 Factory* factory = masm->isolate()->factory();
2462 __ cmp(edx, factory->undefined_value());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002463 __ j(not_equal, conversion_failure);
2464 __ mov(edx, Immediate(0));
2465 __ jmp(&load_arg2);
2466
2467 __ bind(&arg1_is_object);
2468 __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
Steve Block44f0eee2011-05-26 01:26:41 +01002469 __ cmp(ebx, factory->heap_number_map());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002470 __ j(not_equal, &check_undefined_arg1);
2471
2472 // Get the untagged integer version of the edx heap number in ecx.
Ben Murdoch257744e2011-11-30 15:57:28 +00002473 IntegerConvert(masm, edx, use_sse3, conversion_failure);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002474 __ mov(edx, ecx);
2475
2476 // Here edx has the untagged integer, eax has a Smi or a heap number.
2477 __ bind(&load_arg2);
2478
2479 // Test if arg2 is a Smi.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002480 __ JumpIfNotSmi(eax, &arg2_is_object, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002481
2482 __ SmiUntag(eax);
2483 __ mov(ecx, eax);
2484 __ jmp(&done);
2485
2486 // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
2487 __ bind(&check_undefined_arg2);
Steve Block44f0eee2011-05-26 01:26:41 +01002488 __ cmp(eax, factory->undefined_value());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002489 __ j(not_equal, conversion_failure);
2490 __ mov(ecx, Immediate(0));
2491 __ jmp(&done);
2492
2493 __ bind(&arg2_is_object);
2494 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
Steve Block44f0eee2011-05-26 01:26:41 +01002495 __ cmp(ebx, factory->heap_number_map());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002496 __ j(not_equal, &check_undefined_arg2);
2497
2498 // Get the untagged integer version of the eax heap number in ecx.
Ben Murdoch257744e2011-11-30 15:57:28 +00002499 IntegerConvert(masm, eax, use_sse3, conversion_failure);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002500 __ bind(&done);
2501 __ mov(eax, edx);
2502}
2503
2504
Ben Murdochb0fe1622011-05-05 13:52:32 +01002505void FloatingPointHelper::CheckLoadedIntegersWereInt32(MacroAssembler* masm,
2506 bool use_sse3,
2507 Label* not_int32) {
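  // Currently a no-op: the int32 truncation performed when loading the
  // operands is accepted without a further exactness check here.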
2508 return;
2509}
2510
2511
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002512void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
2513 Register number) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002514 Label load_smi, done;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002515
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002516 __ JumpIfSmi(number, &load_smi, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002517 __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00002518 __ jmp(&done, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002519
2520 __ bind(&load_smi);
2521 __ SmiUntag(number);
2522 __ push(number);
2523 __ fild_s(Operand(esp, 0));
2524 __ pop(number);
2525
2526 __ bind(&done);
2527}
2528
2529
2530void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002531 Label load_smi_edx, load_eax, load_smi_eax, done;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002532 // Load operand in edx into xmm0.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002533 __ JumpIfSmi(edx, &load_smi_edx, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002534 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
2535
2536 __ bind(&load_eax);
2537 // Load operand in eax into xmm1.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002538 __ JumpIfSmi(eax, &load_smi_eax, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002539 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00002540 __ jmp(&done, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002541
2542 __ bind(&load_smi_edx);
2543 __ SmiUntag(edx); // Untag smi before converting to float.
Ben Murdoch85b71792012-04-11 18:30:58 +01002544 __ cvtsi2sd(xmm0, Operand(edx));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002545 __ SmiTag(edx); // Retag smi for heap number overwriting test.
2546 __ jmp(&load_eax);
2547
2548 __ bind(&load_smi_eax);
2549 __ SmiUntag(eax); // Untag smi before converting to float.
Ben Murdoch85b71792012-04-11 18:30:58 +01002550 __ cvtsi2sd(xmm1, Operand(eax));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002551 __ SmiTag(eax); // Retag smi for heap number overwriting test.
2552
2553 __ bind(&done);
2554}
2555
2556
2557void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
2558 Label* not_numbers) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002559 Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002560 // Load operand in edx into xmm0, or branch to not_numbers.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002561 __ JumpIfSmi(edx, &load_smi_edx, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01002562 Factory* factory = masm->isolate()->factory();
2563 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), factory->heap_number_map());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002564 __ j(not_equal, not_numbers); // Argument in edx is not a number.
2565 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
2566 __ bind(&load_eax);
2567 // Load operand in eax into xmm1, or branch to not_numbers.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002568 __ JumpIfSmi(eax, &load_smi_eax, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01002569 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), factory->heap_number_map());
Ben Murdoch257744e2011-11-30 15:57:28 +00002570 __ j(equal, &load_float_eax, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002571 __ jmp(not_numbers); // Argument in eax is not a number.
2572 __ bind(&load_smi_edx);
2573 __ SmiUntag(edx); // Untag smi before converting to float.
Ben Murdoch85b71792012-04-11 18:30:58 +01002574 __ cvtsi2sd(xmm0, Operand(edx));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002575 __ SmiTag(edx); // Retag smi for heap number overwriting test.
2576 __ jmp(&load_eax);
2577 __ bind(&load_smi_eax);
2578 __ SmiUntag(eax); // Untag smi before converting to float.
Ben Murdoch85b71792012-04-11 18:30:58 +01002579 __ cvtsi2sd(xmm1, Operand(eax));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002580 __ SmiTag(eax); // Retag smi for heap number overwriting test.
Ben Murdoch257744e2011-11-30 15:57:28 +00002581 __ jmp(&done, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002582 __ bind(&load_float_eax);
2583 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
2584 __ bind(&done);
2585}
2586
2587
2588void FloatingPointHelper::LoadSSE2Smis(MacroAssembler* masm,
2589 Register scratch) {
2590 const Register left = edx;
2591 const Register right = eax;
2592 __ mov(scratch, left);
2593 ASSERT(!scratch.is(right)); // We're about to clobber scratch.
2594 __ SmiUntag(scratch);
Ben Murdoch85b71792012-04-11 18:30:58 +01002595 __ cvtsi2sd(xmm0, Operand(scratch));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002596
2597 __ mov(scratch, right);
2598 __ SmiUntag(scratch);
Ben Murdoch85b71792012-04-11 18:30:58 +01002599 __ cvtsi2sd(xmm1, Operand(scratch));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002600}
2601
2602
Ben Murdochb0fe1622011-05-05 13:52:32 +01002603void FloatingPointHelper::CheckSSE2OperandsAreInt32(MacroAssembler* masm,
2604 Label* non_int32,
2605 Register scratch) {
2606 __ cvttsd2si(scratch, Operand(xmm0));
Ben Murdoch85b71792012-04-11 18:30:58 +01002607 __ cvtsi2sd(xmm2, Operand(scratch));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002608 __ ucomisd(xmm0, xmm2);
2609 __ j(not_zero, non_int32);
2610 __ j(carry, non_int32);
2611 __ cvttsd2si(scratch, Operand(xmm1));
Ben Murdoch85b71792012-04-11 18:30:58 +01002612 __ cvtsi2sd(xmm2, Operand(scratch));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002613 __ ucomisd(xmm1, xmm2);
2614 __ j(not_zero, non_int32);
2615 __ j(carry, non_int32);
2616}
2617
2618
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002619void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm,
2620 Register scratch,
2621 ArgLocation arg_location) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002622 Label load_smi_1, load_smi_2, done_load_1, done;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002623 if (arg_location == ARGS_IN_REGISTERS) {
2624 __ mov(scratch, edx);
2625 } else {
2626 __ mov(scratch, Operand(esp, 2 * kPointerSize));
2627 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002628 __ JumpIfSmi(scratch, &load_smi_1, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002629 __ fld_d(FieldOperand(scratch, HeapNumber::kValueOffset));
2630 __ bind(&done_load_1);
2631
2632 if (arg_location == ARGS_IN_REGISTERS) {
2633 __ mov(scratch, eax);
2634 } else {
2635 __ mov(scratch, Operand(esp, 1 * kPointerSize));
2636 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002637 __ JumpIfSmi(scratch, &load_smi_2, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002638 __ fld_d(FieldOperand(scratch, HeapNumber::kValueOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00002639 __ jmp(&done, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002640
2641 __ bind(&load_smi_1);
2642 __ SmiUntag(scratch);
2643 __ push(scratch);
2644 __ fild_s(Operand(esp, 0));
2645 __ pop(scratch);
2646 __ jmp(&done_load_1);
2647
2648 __ bind(&load_smi_2);
2649 __ SmiUntag(scratch);
2650 __ push(scratch);
2651 __ fild_s(Operand(esp, 0));
2652 __ pop(scratch);
2653
2654 __ bind(&done);
2655}
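// The push/fild_s/pop sequences above are the usual x87 idiom: there is no
// direct register-to-FPU move, so the untagged integer is spilled to the
// stack and loaded with fild_s. In effect (a sketch):
//   double load_int_as_double(int32_t v) { return (double)v; }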
2656
2657
2658void FloatingPointHelper::LoadFloatSmis(MacroAssembler* masm,
2659 Register scratch) {
2660 const Register left = edx;
2661 const Register right = eax;
2662 __ mov(scratch, left);
2663 ASSERT(!scratch.is(right)); // We're about to clobber scratch.
2664 __ SmiUntag(scratch);
2665 __ push(scratch);
2666 __ fild_s(Operand(esp, 0));
2667
2668 __ mov(scratch, right);
2669 __ SmiUntag(scratch);
2670 __ mov(Operand(esp, 0), scratch);
2671 __ fild_s(Operand(esp, 0));
2672 __ pop(scratch);
2673}
2674
2675
2676void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
2677 Label* non_float,
2678 Register scratch) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002679 Label test_other, done;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002680 // Test that both operands are numbers (smi or heap number); otherwise
2681 // jump to non_float, where the non-number argument is treated as NaN.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002682 __ JumpIfSmi(edx, &test_other, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002683 __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
Steve Block44f0eee2011-05-26 01:26:41 +01002684 Factory* factory = masm->isolate()->factory();
2685 __ cmp(scratch, factory->heap_number_map());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002686 __ j(not_equal, non_float); // argument in edx is not a number -> NaN
2687
2688 __ bind(&test_other);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002689 __ JumpIfSmi(eax, &done, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002690 __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
Steve Block44f0eee2011-05-26 01:26:41 +01002691 __ cmp(scratch, factory->heap_number_map());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002692 __ j(not_equal, non_float); // argument in eax is not a number -> NaN
2693
2694 // Fall-through: Both operands are numbers.
2695 __ bind(&done);
2696}
2697
2698
Ben Murdochb0fe1622011-05-05 13:52:32 +01002699void FloatingPointHelper::CheckFloatOperandsAreInt32(MacroAssembler* masm,
2700 Label* non_int32) {
2701 return;
2702}
2703
2704
Ben Murdochb0fe1622011-05-05 13:52:32 +01002705void MathPowStub::Generate(MacroAssembler* masm) {
Ben Murdoch85b71792012-04-11 18:30:58 +01002706 // Registers are used as follows:
2707 // edx = base
2708 // eax = exponent
2709 // ecx = temporary, result
2710
Ben Murdochb0fe1622011-05-05 13:52:32 +01002711 CpuFeatures::Scope use_sse2(SSE2);
Ben Murdoch85b71792012-04-11 18:30:58 +01002712 Label allocate_return, call_runtime;
2713
2714 // Load input parameters.
2715 __ mov(edx, Operand(esp, 2 * kPointerSize));
2716 __ mov(eax, Operand(esp, 1 * kPointerSize));
2717
2718 // Save 1 in xmm3 - we need this several times later on.
2719 __ mov(ecx, Immediate(1));
2720 __ cvtsi2sd(xmm3, Operand(ecx));
2721
2722 Label exponent_nonsmi;
2723 Label base_nonsmi;
2724 // If the exponent is a heap number go to that specific case.
2725 __ JumpIfNotSmi(eax, &exponent_nonsmi);
2726 __ JumpIfNotSmi(edx, &base_nonsmi);
2727
2728 // Optimized version when both exponent and base are smis.
2729 Label powi;
2730 __ SmiUntag(edx);
2731 __ cvtsi2sd(xmm0, Operand(edx));
2732 __ jmp(&powi);
2733 // Exponent is a smi and base is a heap number.
2734 __ bind(&base_nonsmi);
Steve Block44f0eee2011-05-26 01:26:41 +01002735 Factory* factory = masm->isolate()->factory();
Ben Murdoch85b71792012-04-11 18:30:58 +01002736 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
2737 factory->heap_number_map());
2738 __ j(not_equal, &call_runtime);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002739
Ben Murdoch85b71792012-04-11 18:30:58 +01002740 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002741
Ben Murdoch85b71792012-04-11 18:30:58 +01002742 // Optimized version of pow if exponent is a smi.
2743 // xmm0 contains the base.
2744 __ bind(&powi);
2745 __ SmiUntag(eax);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002746
Ben Murdoch85b71792012-04-11 18:30:58 +01002747 // Save the exponent in edx (which held the base), since we must check for
2748 // a negative exponent later. Base and exponent are in different registers.
2749 __ mov(edx, eax);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002750
2751 // Get absolute value of exponent.
Ben Murdoch85b71792012-04-11 18:30:58 +01002752 Label no_neg;
2753 __ cmp(eax, 0);
2754 __ j(greater_equal, &no_neg, Label::kNear);
2755 __ neg(eax);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002756 __ bind(&no_neg);
2757
Ben Murdoch85b71792012-04-11 18:30:58 +01002758 // Load xmm1 with 1.
2759 __ movsd(xmm1, xmm3);
2760 Label while_true;
2761 Label no_multiply;
Ben Murdochc7cc0282012-03-05 14:35:55 +00002762
Ben Murdoch85b71792012-04-11 18:30:58 +01002763 __ bind(&while_true);
2764 __ shr(eax, 1);
2765 __ j(not_carry, &no_multiply, Label::kNear);
2766 __ mulsd(xmm1, xmm0);
2767 __ bind(&no_multiply);
2768 __ mulsd(xmm0, xmm0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002769 __ j(not_zero, &while_true);
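  // The loop above is exponentiation by squaring: shr moves the low bit of
  // the remaining exponent into the carry flag, a set bit multiplies the
  // result by the current power of the base, and the base is squared every
  // iteration. Roughly, in C (a sketch; names are illustrative):
  //   double result = 1.0, b = base;     // xmm1, xmm0
  //   uint32_t e = abs_exponent;         // eax
  //   do {
  //     uint32_t bit = e & 1; e >>= 1;   // shr eax, 1 (bit -> carry)
  //     if (bit) result *= b;            // mulsd xmm1, xmm0
  //     b *= b;                          // mulsd xmm0, xmm0
  //   } while (e != 0);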
2770
Ben Murdoch85b71792012-04-11 18:30:58 +01002771 // edx holds the original exponent; if the exponent is
2772 // negative, return 1/result.
2773 __ test(edx, Operand(edx));
2774 __ j(positive, &allocate_return);
2775 // Special case if xmm1 has reached infinity.
2776 __ mov(ecx, Immediate(0x7FB00000));
2777 __ movd(xmm0, Operand(ecx));
2778 __ cvtss2sd(xmm0, xmm0);
2779 __ ucomisd(xmm0, xmm1);
2780 __ j(equal, &call_runtime);
2781 __ divsd(xmm3, xmm1);
2782 __ movsd(xmm1, xmm3);
2783 __ jmp(&allocate_return);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002784
Ben Murdoch85b71792012-04-11 18:30:58 +01002785 // The exponent (and possibly the base) is a heap number; from here on we
2786 // work on doubles.
2787 __ bind(&exponent_nonsmi);
2788 __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
2789 factory->heap_number_map());
2790 __ j(not_equal, &call_runtime);
2791 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
2792 // Test if the exponent is NaN.
2793 __ ucomisd(xmm1, xmm1);
2794 __ j(parity_even, &call_runtime);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002795
Ben Murdoch85b71792012-04-11 18:30:58 +01002796 Label base_not_smi;
2797 Label handle_special_cases;
2798 __ JumpIfNotSmi(edx, &base_not_smi, Label::kNear);
2799 __ SmiUntag(edx);
2800 __ cvtsi2sd(xmm0, Operand(edx));
2801 __ jmp(&handle_special_cases, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002802
Ben Murdoch85b71792012-04-11 18:30:58 +01002803 __ bind(&base_not_smi);
2804 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
2805 factory->heap_number_map());
2806 __ j(not_equal, &call_runtime);
2807 __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
2808 __ and_(ecx, HeapNumber::kExponentMask);
2809 __ cmp(Operand(ecx), Immediate(HeapNumber::kExponentMask));
2810 // base is NaN or +/-Infinity
2811 __ j(greater_equal, &call_runtime);
2812 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
2813
2814 // base is in xmm0 and exponent is in xmm1.
2815 __ bind(&handle_special_cases);
2816 Label not_minus_half;
2817 // Test for -0.5.
2818 // Load xmm2 with -0.5.
2819 __ mov(ecx, Immediate(0xBF000000));
2820 __ movd(xmm2, Operand(ecx));
2821 __ cvtss2sd(xmm2, xmm2);
2822 // xmm2 now has -0.5.
2823 __ ucomisd(xmm2, xmm1);
2824 __ j(not_equal, &not_minus_half, Label::kNear);
2825
2826 // Calculates reciprocal of square root.
2827 // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
2828 __ xorps(xmm1, xmm1);
2829 __ addsd(xmm1, xmm0);
2830 __ sqrtsd(xmm1, xmm1);
2831 __ divsd(xmm3, xmm1);
2832 __ movsd(xmm1, xmm3);
2833 __ jmp(&allocate_return);
2834
2835 // Test for 0.5.
2836 __ bind(&not_minus_half);
2837 // Load xmm2 with 0.5.
2838 // Since xmm3 is 1 and xmm2 is -0.5 this is simply xmm2 + xmm3.
2839 __ addsd(xmm2, xmm3);
2840 // xmm2 now has 0.5.
2841 __ ucomisd(xmm2, xmm1);
2842 __ j(not_equal, &call_runtime);
2843 // Calculates square root.
2844 // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
2845 __ xorps(xmm1, xmm1);
2846 __ addsd(xmm1, xmm0);
2847 __ sqrtsd(xmm1, xmm1);
2848
2849 __ bind(&allocate_return);
2850 __ AllocateHeapNumber(ecx, eax, edx, &call_runtime);
2851 __ movdbl(FieldOperand(ecx, HeapNumber::kValueOffset), xmm1);
2852 __ mov(eax, ecx);
2853 __ ret(2 * kPointerSize);
2854
2855 __ bind(&call_runtime);
2856 __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002857}
2858
2859
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002860void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
2861 // The key is in edx and the parameter count is in eax.
2862
2863 // The displacement is used for skipping the frame pointer on the
2864 // stack. It is the offset of the last parameter (if any) relative
2865 // to the frame pointer.
2866 static const int kDisplacement = 1 * kPointerSize;
2867
2868 // Check that the key is a smi.
2869 Label slow;
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002870 __ JumpIfNotSmi(edx, &slow, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002871
2872 // Check if the calling frame is an arguments adaptor frame.
Ben Murdoch257744e2011-11-30 15:57:28 +00002873 Label adaptor;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002874 __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2875 __ mov(ecx, Operand(ebx, StandardFrameConstants::kContextOffset));
Ben Murdoch85b71792012-04-11 18:30:58 +01002876 __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
Ben Murdoch257744e2011-11-30 15:57:28 +00002877 __ j(equal, &adaptor, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002878
2879 // Check index against formal parameters count limit passed in
2880 // through register eax. Use unsigned comparison to get negative
2881 // check for free.
Ben Murdoch85b71792012-04-11 18:30:58 +01002882 __ cmp(edx, Operand(eax));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002883 __ j(above_equal, &slow, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002884
2885 // Read the argument from the stack and return it.
2886 STATIC_ASSERT(kSmiTagSize == 1);
2887 STATIC_ASSERT(kSmiTag == 0); // Shifting code depends on these.
2888 __ lea(ebx, Operand(ebp, eax, times_2, 0));
2889 __ neg(edx);
2890 __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
2891 __ ret(0);
2892
2893 // Arguments adaptor case: Check index against actual arguments
2894 // limit found in the arguments adaptor frame. Use unsigned
2895 // comparison to get negative check for free.
2896 __ bind(&adaptor);
2897 __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
Ben Murdoch85b71792012-04-11 18:30:58 +01002898 __ cmp(edx, Operand(ecx));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002899 __ j(above_equal, &slow, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002900
2901 // Read the argument from the stack and return it.
2902 STATIC_ASSERT(kSmiTagSize == 1);
2903 STATIC_ASSERT(kSmiTag == 0); // Shifting code depends on these.
2904 __ lea(ebx, Operand(ebx, ecx, times_2, 0));
2905 __ neg(edx);
2906 __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
2907 __ ret(0);
2908
2909 // Slow-case: Handle non-smi or out-of-bounds access to arguments
2910 // by calling the runtime system.
2911 __ bind(&slow);
2912 __ pop(ebx); // Return address.
2913 __ push(edx);
2914 __ push(ebx);
2915 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
2916}
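// How the element address above works out (a sketch derived from the code):
// ebx = ebp + parameter_count * kPointerSize, because a tagged smi is the
// value times two and the times_2 scale doubles it again. Negating the
// tagged key lets one scaled operand do the subtraction, so the loaded slot
// is ebp + kDisplacement + (parameter_count - key) * kPointerSize.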
2917
2918
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002919void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002920 // esp[0] : return address
2921 // esp[4] : number of parameters
2922 // esp[8] : receiver displacement
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002923 // esp[12] : function
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002924
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002925 // Check if the calling frame is an arguments adaptor frame.
2926 Label runtime;
2927 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2928 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
Ben Murdoch85b71792012-04-11 18:30:58 +01002929 __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002930 __ j(not_equal, &runtime, Label::kNear);
2931
2932 // Patch the arguments.length and the parameters pointer.
2933 __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2934 __ mov(Operand(esp, 1 * kPointerSize), ecx);
2935 __ lea(edx, Operand(edx, ecx, times_2,
2936 StandardFrameConstants::kCallerSPOffset));
2937 __ mov(Operand(esp, 2 * kPointerSize), edx);
2938
2939 __ bind(&runtime);
2940 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
2941}
2942
2943
2944void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
2945 // esp[0] : return address
2946 // esp[4] : number of parameters (tagged)
2947 // esp[8] : receiver displacement
2948 // esp[12] : function
2949
2950 // ebx = parameter count (tagged)
2951 __ mov(ebx, Operand(esp, 1 * kPointerSize));
2952
2953 // Check if the calling frame is an arguments adaptor frame.
2954 // TODO(rossberg): Factor out some of the bits that are shared with the other
2955 // Generate* functions.
2956 Label runtime;
2957 Label adaptor_frame, try_allocate;
2958 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2959 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
Ben Murdoch85b71792012-04-11 18:30:58 +01002960 __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002961 __ j(equal, &adaptor_frame, Label::kNear);
2962
2963 // No adaptor, parameter count = argument count.
2964 __ mov(ecx, ebx);
2965 __ jmp(&try_allocate, Label::kNear);
2966
2967 // We have an adaptor frame. Patch the parameters pointer.
2968 __ bind(&adaptor_frame);
2969 __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2970 __ lea(edx, Operand(edx, ecx, times_2,
2971 StandardFrameConstants::kCallerSPOffset));
2972 __ mov(Operand(esp, 2 * kPointerSize), edx);
2973
2974 // ebx = parameter count (tagged)
2975 // ecx = argument count (tagged)
2976 // esp[4] = parameter count (tagged)
2977 // esp[8] = address of receiver argument
2978 // Compute the mapped parameter count = min(ebx, ecx) in ebx.
Ben Murdoch85b71792012-04-11 18:30:58 +01002979 __ cmp(ebx, Operand(ecx));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002980 __ j(less_equal, &try_allocate, Label::kNear);
2981 __ mov(ebx, ecx);
2982
2983 __ bind(&try_allocate);
2984
2985 // Save mapped parameter count.
2986 __ push(ebx);
2987
2988 // Compute the sizes of backing store, parameter map, and arguments object.
2989 // 1. Parameter map, has 2 extra words containing context and backing store.
2990 const int kParameterMapHeaderSize =
2991 FixedArray::kHeaderSize + 2 * kPointerSize;
2992 Label no_parameter_map;
Ben Murdoch85b71792012-04-11 18:30:58 +01002993 __ test(ebx, Operand(ebx));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002994 __ j(zero, &no_parameter_map, Label::kNear);
2995 __ lea(ebx, Operand(ebx, times_2, kParameterMapHeaderSize));
2996 __ bind(&no_parameter_map);
2997
2998 // 2. Backing store.
2999 __ lea(ebx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize));
3000
3001 // 3. Arguments object.
Ben Murdoch85b71792012-04-11 18:30:58 +01003002 __ add(Operand(ebx), Immediate(Heap::kArgumentsObjectSize));
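  // Net allocation size computed above (a sketch, using the constants as
  // named in the code):
  //   size = (mapped_count == 0
  //               ? 0 : kParameterMapHeaderSize + mapped_count * kPointerSize)
  //          + FixedArray::kHeaderSize + argument_count * kPointerSize
  //          + Heap::kArgumentsObjectSize;
  // The times_2 scaling applied to the tagged (smi) counts is what produces
  // the count * kPointerSize terms directly.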
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003003
3004 // Do the allocation of all three objects in one go.
3005 __ AllocateInNewSpace(ebx, eax, edx, edi, &runtime, TAG_OBJECT);
3006
3007 // eax = address of new object(s) (tagged)
3008 // ecx = argument count (tagged)
3009 // esp[0] = mapped parameter count (tagged)
3010 // esp[8] = parameter count (tagged)
3011 // esp[12] = address of receiver argument
3012 // Get the arguments boilerplate from the current (global) context into edi.
3013 Label has_mapped_parameters, copy;
3014 __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
3015 __ mov(edi, FieldOperand(edi, GlobalObject::kGlobalContextOffset));
3016 __ mov(ebx, Operand(esp, 0 * kPointerSize));
Ben Murdoch85b71792012-04-11 18:30:58 +01003017 __ test(ebx, Operand(ebx));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003018 __ j(not_zero, &has_mapped_parameters, Label::kNear);
3019 __ mov(edi, Operand(edi,
3020 Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX)));
3021 __ jmp(&copy, Label::kNear);
3022
3023 __ bind(&has_mapped_parameters);
3024 __ mov(edi, Operand(edi,
3025 Context::SlotOffset(Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX)));
3026 __ bind(&copy);
3027
3028 // eax = address of new object (tagged)
3029 // ebx = mapped parameter count (tagged)
3030 // ecx = argument count (tagged)
3031 // edi = address of boilerplate object (tagged)
3032 // esp[0] = mapped parameter count (tagged)
3033 // esp[8] = parameter count (tagged)
3034 // esp[12] = address of receiver argument
3035 // Copy the JS object part.
3036 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
3037 __ mov(edx, FieldOperand(edi, i));
3038 __ mov(FieldOperand(eax, i), edx);
3039 }
3040
Ben Murdoch85b71792012-04-11 18:30:58 +01003041 // Set up the callee in-object property.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003042 STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
3043 __ mov(edx, Operand(esp, 4 * kPointerSize));
3044 __ mov(FieldOperand(eax, JSObject::kHeaderSize +
3045 Heap::kArgumentsCalleeIndex * kPointerSize),
3046 edx);
3047
3048 // Use the length (smi tagged) and set that as an in-object property too.
3049 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
3050 __ mov(FieldOperand(eax, JSObject::kHeaderSize +
3051 Heap::kArgumentsLengthIndex * kPointerSize),
3052 ecx);
3053
Ben Murdoch85b71792012-04-11 18:30:58 +01003054 // Set up the elements pointer in the allocated arguments object.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003055 // If we allocated a parameter map, edi will point there, otherwise to the
3056 // backing store.
3057 __ lea(edi, Operand(eax, Heap::kArgumentsObjectSize));
3058 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
3059
3060 // eax = address of new object (tagged)
3061 // ebx = mapped parameter count (tagged)
3062 // ecx = argument count (tagged)
3063 // edi = address of parameter map or backing store (tagged)
3064 // esp[0] = mapped parameter count (tagged)
3065 // esp[8] = parameter count (tagged)
3066 // esp[12] = address of receiver argument
3067 // Free a register.
3068 __ push(eax);
3069
3070 // Initialize parameter map. If there are no mapped arguments, we're done.
3071 Label skip_parameter_map;
Ben Murdoch85b71792012-04-11 18:30:58 +01003072 __ test(ebx, Operand(ebx));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003073 __ j(zero, &skip_parameter_map);
3074
3075 __ mov(FieldOperand(edi, FixedArray::kMapOffset),
3076 Immediate(FACTORY->non_strict_arguments_elements_map()));
3077 __ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2))));
3078 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax);
3079 __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi);
3080 __ lea(eax, Operand(edi, ebx, times_2, kParameterMapHeaderSize));
3081 __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 1 * kPointerSize), eax);
3082
3083 // Copy the parameter slots and the holes in the arguments.
3084 // We need to fill in mapped_parameter_count slots. They index the context,
3085 // where parameters are stored in reverse order, at
3086 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
3087 // The mapped parameters thus need to get indices
3088 // MIN_CONTEXT_SLOTS+parameter_count-1 ..
3089 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
3090 // We loop from right to left.
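  // A small worked example of the range described above: with
  // parameter_count == 4 and mapped_parameter_count == 2, the mapped slots
  // receive context indices MIN_CONTEXT_SLOTS + 3 and MIN_CONTEXT_SLOTS + 2,
  // filled from right to left.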
3091 Label parameters_loop, parameters_test;
3092 __ push(ecx);
3093 __ mov(eax, Operand(esp, 2 * kPointerSize));
3094 __ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
3095 __ add(ebx, Operand(esp, 4 * kPointerSize));
Ben Murdoch85b71792012-04-11 18:30:58 +01003096 __ sub(ebx, Operand(eax));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003097 __ mov(ecx, FACTORY->the_hole_value());
3098 __ mov(edx, edi);
3099 __ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize));
3100 // eax = loop variable (tagged)
3101 // ebx = mapping index (tagged)
3102 // ecx = the hole value
3103 // edx = address of parameter map (tagged)
3104 // edi = address of backing store (tagged)
3105 // esp[0] = argument count (tagged)
3106 // esp[4] = address of new object (tagged)
3107 // esp[8] = mapped parameter count (tagged)
3108 // esp[16] = parameter count (tagged)
3109 // esp[20] = address of receiver argument
3110 __ jmp(&parameters_test, Label::kNear);
3111
3112 __ bind(&parameters_loop);
Ben Murdoch85b71792012-04-11 18:30:58 +01003113 __ sub(Operand(eax), Immediate(Smi::FromInt(1)));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003114 __ mov(FieldOperand(edx, eax, times_2, kParameterMapHeaderSize), ebx);
3115 __ mov(FieldOperand(edi, eax, times_2, FixedArray::kHeaderSize), ecx);
Ben Murdoch85b71792012-04-11 18:30:58 +01003116 __ add(Operand(ebx), Immediate(Smi::FromInt(1)));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003117 __ bind(&parameters_test);
Ben Murdoch85b71792012-04-11 18:30:58 +01003118 __ test(eax, Operand(eax));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003119 __ j(not_zero, &parameters_loop, Label::kNear);
3120 __ pop(ecx);
3121
3122 __ bind(&skip_parameter_map);
3123
3124 // ecx = argument count (tagged)
3125 // edi = address of backing store (tagged)
3126 // esp[0] = address of new object (tagged)
3127 // esp[4] = mapped parameter count (tagged)
3128 // esp[12] = parameter count (tagged)
3129 // esp[16] = address of receiver argument
3130 // Copy arguments header and remaining slots (if there are any).
3131 __ mov(FieldOperand(edi, FixedArray::kMapOffset),
3132 Immediate(FACTORY->fixed_array_map()));
3133 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
3134
3135 Label arguments_loop, arguments_test;
3136 __ mov(ebx, Operand(esp, 1 * kPointerSize));
3137 __ mov(edx, Operand(esp, 4 * kPointerSize));
Ben Murdoch85b71792012-04-11 18:30:58 +01003138 __ sub(Operand(edx), ebx); // Is there a smarter way to do negative scaling?
3139 __ sub(Operand(edx), ebx);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003140 __ jmp(&arguments_test, Label::kNear);
3141
3142 __ bind(&arguments_loop);
Ben Murdoch85b71792012-04-11 18:30:58 +01003143 __ sub(Operand(edx), Immediate(kPointerSize));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003144 __ mov(eax, Operand(edx, 0));
3145 __ mov(FieldOperand(edi, ebx, times_2, FixedArray::kHeaderSize), eax);
Ben Murdoch85b71792012-04-11 18:30:58 +01003146 __ add(Operand(ebx), Immediate(Smi::FromInt(1)));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003147
3148 __ bind(&arguments_test);
Ben Murdoch85b71792012-04-11 18:30:58 +01003149 __ cmp(ebx, Operand(ecx));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003150 __ j(less, &arguments_loop, Label::kNear);
3151
3152 // Restore.
3153 __ pop(eax); // Address of arguments object.
3154 __ pop(ebx); // Parameter count.
3155
3156 // Return and remove the on-stack parameters.
3157 __ ret(3 * kPointerSize);
3158
3159 // Do the runtime call to allocate the arguments object.
3160 __ bind(&runtime);
3161 __ pop(eax); // Remove saved parameter count.
3162 __ mov(Operand(esp, 1 * kPointerSize), ecx); // Patch argument count.
3163 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
3164}
3165
3166
3167void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
3168 // esp[0] : return address
3169 // esp[4] : number of parameters
3170 // esp[8] : receiver displacement
3171 // esp[12] : function
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003172
3173 // Check if the calling frame is an arguments adaptor frame.
3174 Label adaptor_frame, try_allocate, runtime;
3175 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3176 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
Ben Murdoch85b71792012-04-11 18:30:58 +01003177 __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003178 __ j(equal, &adaptor_frame, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003179
3180 // Get the length from the frame.
3181 __ mov(ecx, Operand(esp, 1 * kPointerSize));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003182 __ jmp(&try_allocate, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003183
3184 // Patch the arguments.length and the parameters pointer.
3185 __ bind(&adaptor_frame);
3186 __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3187 __ mov(Operand(esp, 1 * kPointerSize), ecx);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003188 __ lea(edx, Operand(edx, ecx, times_2,
3189 StandardFrameConstants::kCallerSPOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003190 __ mov(Operand(esp, 2 * kPointerSize), edx);
3191
3192 // Try the new space allocation. Start out with computing the size of
3193 // the arguments object and the elements array.
Ben Murdoch257744e2011-11-30 15:57:28 +00003194 Label add_arguments_object;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003195 __ bind(&try_allocate);
Ben Murdoch85b71792012-04-11 18:30:58 +01003196 __ test(ecx, Operand(ecx));
Ben Murdoch257744e2011-11-30 15:57:28 +00003197 __ j(zero, &add_arguments_object, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003198 __ lea(ecx, Operand(ecx, times_2, FixedArray::kHeaderSize));
3199 __ bind(&add_arguments_object);
Ben Murdoch85b71792012-04-11 18:30:58 +01003200 __ add(Operand(ecx), Immediate(Heap::kArgumentsObjectSizeStrict));
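  // Net allocation size computed above (a sketch): for argc arguments,
  //   size = Heap::kArgumentsObjectSizeStrict
  //          + (argc == 0 ? 0 : FixedArray::kHeaderSize + argc * kPointerSize);
  // the times_2 scaling of the tagged count again supplies argc * kPointerSize.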
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003201
3202 // Do the allocation of both objects in one go.
3203 __ AllocateInNewSpace(ecx, eax, edx, ebx, &runtime, TAG_OBJECT);
3204
3205 // Get the arguments boilerplate from the current (global) context.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003206 __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
3207 __ mov(edi, FieldOperand(edi, GlobalObject::kGlobalContextOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003208 const int offset =
3209 Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX);
3210 __ mov(edi, Operand(edi, offset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003211
3212 // Copy the JS object part.
3213 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
3214 __ mov(ebx, FieldOperand(edi, i));
3215 __ mov(FieldOperand(eax, i), ebx);
3216 }
3217
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003218 // Get the length (smi tagged) and set that as an in-object property too.
Steve Block44f0eee2011-05-26 01:26:41 +01003219 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003220 __ mov(ecx, Operand(esp, 1 * kPointerSize));
Steve Block44f0eee2011-05-26 01:26:41 +01003221 __ mov(FieldOperand(eax, JSObject::kHeaderSize +
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003222 Heap::kArgumentsLengthIndex * kPointerSize),
Steve Block44f0eee2011-05-26 01:26:41 +01003223 ecx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003224
3225 // If there are no actual arguments, we're done.
3226 Label done;
Ben Murdoch85b71792012-04-11 18:30:58 +01003227 __ test(ecx, Operand(ecx));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003228 __ j(zero, &done, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003229
3230 // Get the parameters pointer from the stack.
3231 __ mov(edx, Operand(esp, 2 * kPointerSize));
3232
Ben Murdoch85b71792012-04-11 18:30:58 +01003233 // Set up the elements pointer in the allocated arguments object and
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003234 // initialize the header in the elements fixed array.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003235 __ lea(edi, Operand(eax, Heap::kArgumentsObjectSizeStrict));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003236 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
3237 __ mov(FieldOperand(edi, FixedArray::kMapOffset),
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003238 Immediate(FACTORY->fixed_array_map()));
Steve Block44f0eee2011-05-26 01:26:41 +01003239
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003240 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
3241 // Untag the length for the loop below.
3242 __ SmiUntag(ecx);
3243
3244 // Copy the fixed array slots.
Ben Murdoch257744e2011-11-30 15:57:28 +00003245 Label loop;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003246 __ bind(&loop);
3247 __ mov(ebx, Operand(edx, -1 * kPointerSize)); // Skip receiver.
3248 __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx);
Ben Murdoch85b71792012-04-11 18:30:58 +01003249 __ add(Operand(edi), Immediate(kPointerSize));
3250 __ sub(Operand(edx), Immediate(kPointerSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003251 __ dec(ecx);
3252 __ j(not_zero, &loop);
3253
3254 // Return and remove the on-stack parameters.
3255 __ bind(&done);
3256 __ ret(3 * kPointerSize);
3257
3258 // Do the runtime call to allocate the arguments object.
3259 __ bind(&runtime);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003260 __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003261}
3262
3263
3264void RegExpExecStub::Generate(MacroAssembler* masm) {
3265 // Just jump directly to runtime if native RegExp is not selected at compile
3266 // time, or if the regexp entry in generated code is turned off by a runtime
3267 // switch or at compilation.
3268#ifdef V8_INTERPRETED_REGEXP
3269 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
3270#else // V8_INTERPRETED_REGEXP
Ben Murdoch85b71792012-04-11 18:30:58 +01003271 if (!FLAG_regexp_entry_native) {
3272 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
3273 return;
3274 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003275
3276 // Stack frame on entry.
3277 // esp[0]: return address
3278 // esp[4]: last_match_info (expected JSArray)
3279 // esp[8]: previous index
3280 // esp[12]: subject string
3281 // esp[16]: JSRegExp object
3282
3283 static const int kLastMatchInfoOffset = 1 * kPointerSize;
3284 static const int kPreviousIndexOffset = 2 * kPointerSize;
3285 static const int kSubjectOffset = 3 * kPointerSize;
3286 static const int kJSRegExpOffset = 4 * kPointerSize;
3287
3288 Label runtime, invoke_regexp;
3289
3290 // Ensure that a RegExp stack is allocated.
3291 ExternalReference address_of_regexp_stack_memory_address =
Steve Block44f0eee2011-05-26 01:26:41 +01003292 ExternalReference::address_of_regexp_stack_memory_address(
3293 masm->isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003294 ExternalReference address_of_regexp_stack_memory_size =
Steve Block44f0eee2011-05-26 01:26:41 +01003295 ExternalReference::address_of_regexp_stack_memory_size(masm->isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003296 __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
Ben Murdoch85b71792012-04-11 18:30:58 +01003297 __ test(ebx, Operand(ebx));
Ben Murdoch257744e2011-11-30 15:57:28 +00003298 __ j(zero, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003299
3300 // Check that the first argument is a JSRegExp object.
3301 __ mov(eax, Operand(esp, kJSRegExpOffset));
3302 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003303 __ JumpIfSmi(eax, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003304 __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
3305 __ j(not_equal, &runtime);
3306 // Check that the RegExp has been compiled (data contains a fixed array).
3307 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
3308 if (FLAG_debug_code) {
3309 __ test(ecx, Immediate(kSmiTagMask));
3310 __ Check(not_zero, "Unexpected type for RegExp data, FixedArray expected");
3311 __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
3312 __ Check(equal, "Unexpected type for RegExp data, FixedArray expected");
3313 }
3314
3315 // ecx: RegExp data (FixedArray)
3316 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
3317 __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset));
Ben Murdoch85b71792012-04-11 18:30:58 +01003318 __ cmp(Operand(ebx), Immediate(Smi::FromInt(JSRegExp::IRREGEXP)));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003319 __ j(not_equal, &runtime);
3320
3321 // ecx: RegExp data (FixedArray)
3322 // Check that the number of captures fit in the static offsets vector buffer.
3323 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
3324 // Calculate number of capture registers (number_of_captures + 1) * 2. This
3325 // uses the assumption that smis are 2 * their untagged value.
3326 STATIC_ASSERT(kSmiTag == 0);
3327 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
Ben Murdoch85b71792012-04-11 18:30:58 +01003328 __ add(Operand(edx), Immediate(2)); // edx was a smi.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003329 // Check that the static offsets vector buffer is large enough.
3330 __ cmp(edx, OffsetsVector::kStaticOffsetsVectorSize);
3331 __ j(above, &runtime);
3332
3333 // ecx: RegExp data (FixedArray)
3334 // edx: Number of capture registers
3335 // Check that the second argument is a string.
3336 __ mov(eax, Operand(esp, kSubjectOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003337 __ JumpIfSmi(eax, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003338 Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
3339 __ j(NegateCondition(is_string), &runtime);
3340 // Get the length of the string to ebx.
3341 __ mov(ebx, FieldOperand(eax, String::kLengthOffset));
3342
3343 // ebx: Length of subject string as a smi
3344 // ecx: RegExp data (FixedArray)
3345 // edx: Number of capture registers
3346 // Check that the third argument is a positive smi less than the subject
3347 // string length. A negative value will be greater (unsigned comparison).
3348 __ mov(eax, Operand(esp, kPreviousIndexOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003349 __ JumpIfNotSmi(eax, &runtime);
Ben Murdoch85b71792012-04-11 18:30:58 +01003350 __ cmp(eax, Operand(ebx));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003351 __ j(above_equal, &runtime);
3352
3353 // ecx: RegExp data (FixedArray)
3354 // edx: Number of capture registers
3355 // Check that the fourth object is a JSArray object.
3356 __ mov(eax, Operand(esp, kLastMatchInfoOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003357 __ JumpIfSmi(eax, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003358 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
3359 __ j(not_equal, &runtime);
3360 // Check that the JSArray is in fast case.
3361 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
3362 __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
Steve Block44f0eee2011-05-26 01:26:41 +01003363 Factory* factory = masm->isolate()->factory();
3364 __ cmp(eax, factory->fixed_array_map());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003365 __ j(not_equal, &runtime);
3366 // Check that the last match info has space for the capture registers and the
3367 // additional information.
3368 __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
3369 __ SmiUntag(eax);
Ben Murdoch85b71792012-04-11 18:30:58 +01003370 __ add(Operand(edx), Immediate(RegExpImpl::kLastMatchOverhead));
3371 __ cmp(edx, Operand(eax));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003372 __ j(greater, &runtime);
3373
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003374 // Reset offset for possibly sliced string.
3375 __ Set(edi, Immediate(0));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003376 // ecx: RegExp data (FixedArray)
3377 // Check the representation and encoding of the subject string.
3378 Label seq_ascii_string, seq_two_byte_string, check_code;
3379 __ mov(eax, Operand(esp, kSubjectOffset));
3380 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3381 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
3382 // First check for flat two byte string.
Ben Murdoch85b71792012-04-11 18:30:58 +01003383 __ and_(ebx,
3384 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003385 STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003386 __ j(zero, &seq_two_byte_string, Label::kNear);
Ben Murdoch85b71792012-04-11 18:30:58 +01003387 // Any other flat string must be a flat ascii string.
3388 __ and_(Operand(ebx),
3389 Immediate(kIsNotStringMask | kStringRepresentationMask));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003390 __ j(zero, &seq_ascii_string, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003391
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003392 // Check for flat cons string or sliced string.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003393 // A flat cons string is a cons string where the second part is the empty
3394 // string. In that case the subject string is just the first part of the cons
3395 // string. Also in this case the first part of the cons string is known to be
3396 // a sequential string or an external string.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003397 // In the case of a sliced string its offset has to be taken into account.
Ben Murdoch85b71792012-04-11 18:30:58 +01003398 Label cons_string, check_encoding;
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003399 STATIC_ASSERT(kConsStringTag < kExternalStringTag);
3400 STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
Ben Murdoch85b71792012-04-11 18:30:58 +01003401 __ cmp(Operand(ebx), Immediate(kExternalStringTag));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003402 __ j(less, &cons_string);
Ben Murdoch85b71792012-04-11 18:30:58 +01003403 __ j(equal, &runtime);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003404
3405 // String is sliced.
3406 __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset));
3407 __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset));
3408 // edi: offset of sliced string, smi-tagged.
3409 // eax: parent string.
3410 __ jmp(&check_encoding, Label::kNear);
3411 // String is a cons string, check whether it is flat.
3412 __ bind(&cons_string);
3413 __ cmp(FieldOperand(eax, ConsString::kSecondOffset), factory->empty_string());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003414 __ j(not_equal, &runtime);
3415 __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003416 __ bind(&check_encoding);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003417 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003418 // eax: first part of cons string or parent of sliced string.
3419 // ebx: map of first part of cons string or map of parent of sliced string.
3420 // Is first part of cons or parent of slice a flat two byte string?
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003421 __ test_b(FieldOperand(ebx, Map::kInstanceTypeOffset),
3422 kStringRepresentationMask | kStringEncodingMask);
3423 STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003424 __ j(zero, &seq_two_byte_string, Label::kNear);
Ben Murdoch85b71792012-04-11 18:30:58 +01003425 // Any other flat string must be ascii.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003426 __ test_b(FieldOperand(ebx, Map::kInstanceTypeOffset),
3427 kStringRepresentationMask);
Ben Murdoch85b71792012-04-11 18:30:58 +01003428 __ j(not_zero, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003429
3430 __ bind(&seq_ascii_string);
Ben Murdoch85b71792012-04-11 18:30:58 +01003431 // eax: subject string (flat ascii)
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003432 // ecx: RegExp data (FixedArray)
3433 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataAsciiCodeOffset));
Ben Murdoch85b71792012-04-11 18:30:58 +01003434 __ Set(ecx, Immediate(1)); // Type is ascii.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003435 __ jmp(&check_code, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003436
3437 __ bind(&seq_two_byte_string);
3438 // eax: subject string (flat two byte)
3439 // ecx: RegExp data (FixedArray)
3440 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003441 __ Set(ecx, Immediate(0)); // Type is two byte.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003442
3443 __ bind(&check_code);
3444 // Check that the irregexp code has been generated for the actual string
3445 // encoding. If it has, the field contains a code object; otherwise it contains
Ben Murdoch257744e2011-11-30 15:57:28 +00003446 // a smi (code flushing support).
3447 __ JumpIfSmi(edx, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003448
3449 // eax: subject string
3450 // edx: code
Ben Murdoch85b71792012-04-11 18:30:58 +01003451 // ecx: encoding of subject string (1 if ascii, 0 if two_byte);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003452 // Load used arguments before starting to push arguments for call to native
3453 // RegExp code to avoid handling changing stack height.
3454 __ mov(ebx, Operand(esp, kPreviousIndexOffset));
3455 __ SmiUntag(ebx); // Previous index from smi.
3456
3457 // eax: subject string
3458 // ebx: previous index
3459 // edx: code
Ben Murdoch85b71792012-04-11 18:30:58 +01003460 // ecx: encoding of subject string (1 if ascii 0 if two_byte);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003461 // All checks done. Now push arguments for native regexp code.
Steve Block44f0eee2011-05-26 01:26:41 +01003462 Counters* counters = masm->isolate()->counters();
3463 __ IncrementCounter(counters->regexp_entry_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003464
Steve Block44f0eee2011-05-26 01:26:41 +01003465 // Isolates: note we add an additional parameter here (isolate pointer).
3466 static const int kRegExpExecuteArguments = 8;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003467 __ EnterApiExitFrame(kRegExpExecuteArguments);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003468
Steve Block44f0eee2011-05-26 01:26:41 +01003469 // Argument 8: Pass current isolate address.
3470 __ mov(Operand(esp, 7 * kPointerSize),
3471 Immediate(ExternalReference::isolate_address()));
3472
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003473 // Argument 7: Indicate that this is a direct call from JavaScript.
3474 __ mov(Operand(esp, 6 * kPointerSize), Immediate(1));
3475
3476 // Argument 6: Start (high end) of backtracking stack memory area.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003477 __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address));
3478 __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size));
3479 __ mov(Operand(esp, 5 * kPointerSize), esi);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003480
3481 // Argument 5: static offsets vector buffer.
3482 __ mov(Operand(esp, 4 * kPointerSize),
Steve Block44f0eee2011-05-26 01:26:41 +01003483 Immediate(ExternalReference::address_of_static_offsets_vector(
3484 masm->isolate())));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003485
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003486 // Argument 2: Previous index.
3487 __ mov(Operand(esp, 1 * kPointerSize), ebx);
3488
3489 // Argument 1: Original subject string.
3490 // The original subject is in the previous stack frame. Therefore we have to
3491 // use ebp, which points exactly to one pointer size below the previous esp.
3492 // (Because creating a new stack frame pushes the previous ebp onto the stack
3493 // and thereby moves up esp by one kPointerSize.)
3494 __ mov(esi, Operand(ebp, kSubjectOffset + kPointerSize));
3495 __ mov(Operand(esp, 0 * kPointerSize), esi);
3496
3497 // esi: original subject string
3498 // eax: underlying subject string
3499 // ebx: previous index
Ben Murdoch85b71792012-04-11 18:30:58 +01003500 // ecx: encoding of subject string (1 if ascii 0 if two_byte);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003501 // edx: code
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003502 // Argument 4: End of string data
3503 // Argument 3: Start of string data
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003504 // Prepare start and end index of the input.
3505 // Load the length from the original sliced string if that is the case.
3506 __ mov(esi, FieldOperand(esi, String::kLengthOffset));
Ben Murdoch85b71792012-04-11 18:30:58 +01003507 __ add(esi, Operand(edi)); // Calculate input end wrt offset.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003508 __ SmiUntag(edi);
Ben Murdoch85b71792012-04-11 18:30:58 +01003509 __ add(ebx, Operand(edi)); // Calculate input start wrt offset.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003510
3511 // ebx: start index of the input string
3512 // esi: end index of the input string
3513 Label setup_two_byte, setup_rest;
Ben Murdoch85b71792012-04-11 18:30:58 +01003514 __ test(ecx, Operand(ecx));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003515 __ j(zero, &setup_two_byte, Label::kNear);
3516 __ SmiUntag(esi);
3517 __ lea(ecx, FieldOperand(eax, esi, times_1, SeqAsciiString::kHeaderSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003518 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4.
3519 __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqAsciiString::kHeaderSize));
3520 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3.
Ben Murdoch257744e2011-11-30 15:57:28 +00003521 __ jmp(&setup_rest, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003522
3523 __ bind(&setup_two_byte);
3524 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003525 STATIC_ASSERT(kSmiTagSize == 1); // esi is smi (powered by 2).
3526 __ lea(ecx, FieldOperand(eax, esi, times_1, SeqTwoByteString::kHeaderSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003527 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4.
3528 __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
3529 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3.
3530
3531 __ bind(&setup_rest);
3532
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003533 // Locate the code entry and call it.
Ben Murdoch85b71792012-04-11 18:30:58 +01003534 __ add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
3535 __ call(Operand(edx));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003536
3537 // Drop arguments and come back to JS mode.
3538 __ LeaveApiExitFrame();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003539
3540 // Check the result.
3541 Label success;
3542 __ cmp(eax, NativeRegExpMacroAssembler::SUCCESS);
Ben Murdoch257744e2011-11-30 15:57:28 +00003543 __ j(equal, &success);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003544 Label failure;
3545 __ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
Ben Murdoch257744e2011-11-30 15:57:28 +00003546 __ j(equal, &failure);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003547 __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
3548 // If not exception it can only be retry. Handle that in the runtime system.
3549 __ j(not_equal, &runtime);
3550 // Result must now be exception. If there is no pending exception already a
3551 // stack overflow (on the backtrack stack) was detected in RegExp code but
3552 // haven't created the exception yet. Handle that in the runtime system.
3553 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
Ben Murdoch589d6972011-11-30 16:04:58 +00003554 ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
Steve Block44f0eee2011-05-26 01:26:41 +01003555 masm->isolate());
Ben Murdoch85b71792012-04-11 18:30:58 +01003556 __ mov(edx,
3557 Operand::StaticVariable(ExternalReference::the_hole_value_location(
3558 masm->isolate())));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003559 __ mov(eax, Operand::StaticVariable(pending_exception));
Ben Murdoch85b71792012-04-11 18:30:58 +01003560 __ cmp(edx, Operand(eax));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003561 __ j(equal, &runtime);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003562 // For exception, throw the exception again.
3563
3564 // Clear the pending exception variable.
3565 __ mov(Operand::StaticVariable(pending_exception), edx);
3566
3567 // Special handling of termination exceptions which are uncatchable
3568 // by javascript code.
Steve Block44f0eee2011-05-26 01:26:41 +01003569 __ cmp(eax, factory->termination_exception());
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003570 Label throw_termination_exception;
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003571 __ j(equal, &throw_termination_exception, Label::kNear);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003572
3573 // Handle normal exception by following handler chain.
3574 __ Throw(eax);
3575
3576 __ bind(&throw_termination_exception);
Ben Murdoch85b71792012-04-11 18:30:58 +01003577 __ ThrowUncatchable(TERMINATION, eax);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003578
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003579 __ bind(&failure);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003580 // For failure to match, return null.
Ben Murdoch85b71792012-04-11 18:30:58 +01003581 __ mov(Operand(eax), factory->null_value());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003582 __ ret(4 * kPointerSize);
3583
3584 // Load RegExp data.
3585 __ bind(&success);
3586 __ mov(eax, Operand(esp, kJSRegExpOffset));
3587 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
3588 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
3589 // Calculate number of capture registers (number_of_captures + 1) * 2.
3590 STATIC_ASSERT(kSmiTag == 0);
3591 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
Ben Murdoch85b71792012-04-11 18:30:58 +01003592 __ add(Operand(edx), Immediate(2)); // edx was a smi.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003593
3594 // edx: Number of capture registers
3595 // Load last_match_info which is still known to be a fast case JSArray.
3596 __ mov(eax, Operand(esp, kLastMatchInfoOffset));
3597 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
3598
3599 // ebx: last_match_info backing store (FixedArray)
3600 // edx: number of capture registers
3601 // Store the capture count.
3602 __ SmiTag(edx); // Number of capture registers to smi.
3603 __ mov(FieldOperand(ebx, RegExpImpl::kLastCaptureCountOffset), edx);
3604 __ SmiUntag(edx); // Number of capture registers back from smi.
3605 // Store last subject and last input.
3606 __ mov(eax, Operand(esp, kSubjectOffset));
3607 __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax);
Ben Murdoch85b71792012-04-11 18:30:58 +01003608 __ mov(ecx, ebx);
3609 __ RecordWrite(ecx, RegExpImpl::kLastSubjectOffset, eax, edi);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003610 __ mov(eax, Operand(esp, kSubjectOffset));
3611 __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax);
Ben Murdoch85b71792012-04-11 18:30:58 +01003612 __ mov(ecx, ebx);
3613 __ RecordWrite(ecx, RegExpImpl::kLastInputOffset, eax, edi);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003614
3615 // Get the static offsets vector filled by the native regexp code.
3616 ExternalReference address_of_static_offsets_vector =
Steve Block44f0eee2011-05-26 01:26:41 +01003617 ExternalReference::address_of_static_offsets_vector(masm->isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003618 __ mov(ecx, Immediate(address_of_static_offsets_vector));
3619
3620 // ebx: last_match_info backing store (FixedArray)
3621 // ecx: offsets vector
3622 // edx: number of capture registers
Ben Murdoch257744e2011-11-30 15:57:28 +00003623 Label next_capture, done;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003624 // Capture register counter starts from number of capture registers and
3625 // counts down until wrapping after zero.
3626 __ bind(&next_capture);
Ben Murdoch85b71792012-04-11 18:30:58 +01003627 __ sub(Operand(edx), Immediate(1));
Ben Murdoch257744e2011-11-30 15:57:28 +00003628 __ j(negative, &done, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003629 // Read the value from the static offsets vector buffer.
3630 __ mov(edi, Operand(ecx, edx, times_int_size, 0));
3631 __ SmiTag(edi);
3632 // Store the smi value in the last match info.
3633 __ mov(FieldOperand(ebx,
3634 edx,
3635 times_pointer_size,
3636 RegExpImpl::kFirstCaptureOffset),
3637 edi);
3638 __ jmp(&next_capture);
3639 __ bind(&done);
3640
3641 // Return last match info.
3642 __ mov(eax, Operand(esp, kLastMatchInfoOffset));
3643 __ ret(4 * kPointerSize);
3644
3645 // Do the runtime call to execute the regexp.
3646 __ bind(&runtime);
3647 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
3648#endif // V8_INTERPRETED_REGEXP
3649}
3650
3651
Ben Murdochb0fe1622011-05-05 13:52:32 +01003652void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
3653 const int kMaxInlineLength = 100;
3654 Label slowcase;
Ben Murdoch257744e2011-11-30 15:57:28 +00003655 Label done;
Ben Murdochb0fe1622011-05-05 13:52:32 +01003656 __ mov(ebx, Operand(esp, kPointerSize * 3));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003657 __ JumpIfNotSmi(ebx, &slowcase);
Ben Murdoch85b71792012-04-11 18:30:58 +01003658 __ cmp(Operand(ebx), Immediate(Smi::FromInt(kMaxInlineLength)));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003659 __ j(above, &slowcase);
3660 // Smi-tagging is equivalent to multiplying by 2.
3661 STATIC_ASSERT(kSmiTag == 0);
3662 STATIC_ASSERT(kSmiTagSize == 1);
3663 // Allocate RegExpResult followed by FixedArray with size in ebx.
3664 // JSArray: [Map][empty properties][Elements][Length-smi][index][input]
3665 // Elements: [Map][Length][..elements..]
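  // Allocation size implied by the layout above (a sketch): the smi length in
  // ebx scaled by times_half_pointer_size contributes length * kPointerSize,
  // so in total
  //   size = JSRegExpResult::kSize + FixedArray::kHeaderSize
  //          + length * kPointerSize.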
3666 __ AllocateInNewSpace(JSRegExpResult::kSize + FixedArray::kHeaderSize,
3667 times_half_pointer_size,
3668 ebx, // In: Number of elements (times 2, being a smi)
3669 eax, // Out: Start of allocation (tagged).
3670 ecx, // Out: End of allocation.
3671 edx, // Scratch register
3672 &slowcase,
3673 TAG_OBJECT);
3674 // eax: Start of allocated area, object-tagged.
3675
3676 // Set JSArray map to global.regexp_result_map().
3677 // Set empty properties FixedArray.
3678 // Set elements to point to FixedArray allocated right after the JSArray.
3679 // Interleave operations for better latency.
3680 __ mov(edx, ContextOperand(esi, Context::GLOBAL_INDEX));
Steve Block44f0eee2011-05-26 01:26:41 +01003681 Factory* factory = masm->isolate()->factory();
3682 __ mov(ecx, Immediate(factory->empty_fixed_array()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003683 __ lea(ebx, Operand(eax, JSRegExpResult::kSize));
3684 __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalContextOffset));
3685 __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
3686 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ecx);
3687 __ mov(edx, ContextOperand(edx, Context::REGEXP_RESULT_MAP_INDEX));
3688 __ mov(FieldOperand(eax, HeapObject::kMapOffset), edx);
3689
3690 // Set input, index and length fields from arguments.
3691 __ mov(ecx, Operand(esp, kPointerSize * 1));
3692 __ mov(FieldOperand(eax, JSRegExpResult::kInputOffset), ecx);
3693 __ mov(ecx, Operand(esp, kPointerSize * 2));
3694 __ mov(FieldOperand(eax, JSRegExpResult::kIndexOffset), ecx);
3695 __ mov(ecx, Operand(esp, kPointerSize * 3));
3696 __ mov(FieldOperand(eax, JSArray::kLengthOffset), ecx);
3697
3698 // Fill out the elements FixedArray.
3699 // eax: JSArray.
3700 // ebx: FixedArray.
3701 // ecx: Number of elements in array, as smi.
3702
3703 // Set map.
3704 __ mov(FieldOperand(ebx, HeapObject::kMapOffset),
Steve Block44f0eee2011-05-26 01:26:41 +01003705 Immediate(factory->fixed_array_map()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003706 // Set length.
3707 __ mov(FieldOperand(ebx, FixedArray::kLengthOffset), ecx);
3708 // Fill contents of fixed-array with the-hole.
3709 __ SmiUntag(ecx);
Steve Block44f0eee2011-05-26 01:26:41 +01003710 __ mov(edx, Immediate(factory->the_hole_value()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003711 __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize));
3712 // Fill fixed array elements with hole.
3713 // eax: JSArray.
3714 // ecx: Number of elements to fill.
3715 // ebx: Start of elements in FixedArray.
3716 // edx: the hole.
3717 Label loop;
Ben Murdoch85b71792012-04-11 18:30:58 +01003718 __ test(ecx, Operand(ecx));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003719 __ bind(&loop);
Ben Murdoch257744e2011-11-30 15:57:28 +00003720 __ j(less_equal, &done, Label::kNear); // Jump if ecx is negative or zero.
Ben Murdoch85b71792012-04-11 18:30:58 +01003721 __ sub(Operand(ecx), Immediate(1));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003722 __ mov(Operand(ebx, ecx, times_pointer_size, 0), edx);
3723 __ jmp(&loop);
3724
3725 __ bind(&done);
3726 __ ret(3 * kPointerSize);
3727
3728 __ bind(&slowcase);
3729 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
3730}
3731
3732
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003733void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
3734 Register object,
3735 Register result,
3736 Register scratch1,
3737 Register scratch2,
3738 bool object_is_smi,
3739 Label* not_found) {
3740 // Use of registers. Register result is used as a temporary.
3741 Register number_string_cache = result;
3742 Register mask = scratch1;
3743 Register scratch = scratch2;
3744
3745 // Load the number string cache.
Ben Murdoch85b71792012-04-11 18:30:58 +01003746 ExternalReference roots_address =
3747 ExternalReference::roots_address(masm->isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003748 __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex));
3749 __ mov(number_string_cache,
Ben Murdoch85b71792012-04-11 18:30:58 +01003750 Operand::StaticArray(scratch, times_pointer_size, roots_address));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003751 // Make the hash mask from the length of the number string cache. It
3752 // contains two elements (number and string) for each cache entry.
3753 __ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
3754 __ shr(mask, kSmiTagSize + 1); // Untag length and divide it by two.
Ben Murdoch85b71792012-04-11 18:30:58 +01003755 __ sub(Operand(mask), Immediate(1)); // Make mask.
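  // Worked example: a cache with 128 entries is a FixedArray of length 256,
  // stored as the smi word 512; shifting right by kSmiTagSize + 1 gives 128,
  // and subtracting 1 leaves the mask 0x7f (this assumes the cache size is a
  // power of two).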
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003756
3757 // Calculate the entry in the number string cache. The hash value in the
3758 // number string cache for smis is just the smi value, and the hash for
3759 // doubles is the xor of the upper and lower words. See
3760 // Heap::GetNumberStringCache.
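  // The cache is laid out as (number, string) pairs, so entry i occupies
  // elements 2 * i and 2 * i + 1; the masked hash selects i, the
  // times_twice_pointer_size scale below turns it into a pair offset, and the
  // matching string sits one pointer past the cached number.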
Ben Murdoch257744e2011-11-30 15:57:28 +00003761 Label smi_hash_calculated;
3762 Label load_result_from_cache;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003763 if (object_is_smi) {
3764 __ mov(scratch, object);
3765 __ SmiUntag(scratch);
3766 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00003767 Label not_smi;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003768 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003769 __ JumpIfNotSmi(object, &not_smi, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003770 __ mov(scratch, object);
3771 __ SmiUntag(scratch);
Ben Murdoch257744e2011-11-30 15:57:28 +00003772 __ jmp(&smi_hash_calculated, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003773 __ bind(&not_smi);
3774 __ cmp(FieldOperand(object, HeapObject::kMapOffset),
Steve Block44f0eee2011-05-26 01:26:41 +01003775 masm->isolate()->factory()->heap_number_map());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003776 __ j(not_equal, not_found);
3777 STATIC_ASSERT(8 == kDoubleSize);
3778 __ mov(scratch, FieldOperand(object, HeapNumber::kValueOffset));
3779 __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
3780 // Object is heap number and hash is now in scratch. Calculate cache index.
Ben Murdoch85b71792012-04-11 18:30:58 +01003781 __ and_(scratch, Operand(mask));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003782 Register index = scratch;
3783 Register probe = mask;
3784 __ mov(probe,
3785 FieldOperand(number_string_cache,
3786 index,
3787 times_twice_pointer_size,
3788 FixedArray::kHeaderSize));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003789 __ JumpIfSmi(probe, not_found);
Ben Murdoch8b112d22011-06-08 16:22:53 +01003790 if (CpuFeatures::IsSupported(SSE2)) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003791 CpuFeatures::Scope fscope(SSE2);
3792 __ movdbl(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
3793 __ movdbl(xmm1, FieldOperand(probe, HeapNumber::kValueOffset));
3794 __ ucomisd(xmm0, xmm1);
3795 } else {
3796 __ fld_d(FieldOperand(object, HeapNumber::kValueOffset));
3797 __ fld_d(FieldOperand(probe, HeapNumber::kValueOffset));
3798 __ FCmp();
3799 }
3800 __ j(parity_even, not_found); // Bail out if NaN is involved.
3801 __ j(not_equal, not_found); // The cache did not contain this value.
Ben Murdoch257744e2011-11-30 15:57:28 +00003802 __ jmp(&load_result_from_cache, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003803 }
3804
3805 __ bind(&smi_hash_calculated);
3806 // Object is smi and hash is now in scratch. Calculate cache index.
Ben Murdoch85b71792012-04-11 18:30:58 +01003807 __ and_(scratch, Operand(mask));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003808 Register index = scratch;
3809 // Check if the entry is the smi we are looking for.
3810 __ cmp(object,
3811 FieldOperand(number_string_cache,
3812 index,
3813 times_twice_pointer_size,
3814 FixedArray::kHeaderSize));
3815 __ j(not_equal, not_found);
3816
3817 // Get the result from the cache.
3818 __ bind(&load_result_from_cache);
3819 __ mov(result,
3820 FieldOperand(number_string_cache,
3821 index,
3822 times_twice_pointer_size,
3823 FixedArray::kHeaderSize + kPointerSize));
Steve Block44f0eee2011-05-26 01:26:41 +01003824 Counters* counters = masm->isolate()->counters();
3825 __ IncrementCounter(counters->number_to_string_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003826}
3827
3828
3829void NumberToStringStub::Generate(MacroAssembler* masm) {
3830 Label runtime;
3831
3832 __ mov(ebx, Operand(esp, kPointerSize));
3833
3834 // Generate code to lookup number in the number string cache.
3835 GenerateLookupNumberStringCache(masm, ebx, eax, ecx, edx, false, &runtime);
3836 __ ret(1 * kPointerSize);
3837
3838 __ bind(&runtime);
3839 // Handle number to string in the runtime system if not found in the cache.
3840 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);
3841}
3842
3843
3844static int NegativeComparisonResult(Condition cc) {
3845 ASSERT(cc != equal);
3846 ASSERT((cc == less) || (cc == less_equal)
3847 || (cc == greater) || (cc == greater_equal));
3848 return (cc == greater || cc == greater_equal) ? LESS : GREATER;
3849}
3850
3851void CompareStub::Generate(MacroAssembler* masm) {
3852 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
3853
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003854 Label check_unequal_objects;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003855
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003856 // Compare two smis if required.
3857 if (include_smi_compare_) {
3858 Label non_smi, smi_done;
Ben Murdoch85b71792012-04-11 18:30:58 +01003859 __ mov(ecx, Operand(edx));
3860 __ or_(ecx, Operand(eax));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003861 __ JumpIfNotSmi(ecx, &non_smi, Label::kNear);
Ben Murdoch85b71792012-04-11 18:30:58 +01003862 __ sub(edx, Operand(eax)); // Return on the result of the subtraction.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003863 __ j(no_overflow, &smi_done, Label::kNear);
Ben Murdochf87a2032010-10-22 12:50:53 +01003864 __ not_(edx); // Correct sign in case of overflow. edx is never 0 here.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003865 __ bind(&smi_done);
3866 __ mov(eax, edx);
3867 __ ret(0);
3868 __ bind(&non_smi);
3869 } else if (FLAG_debug_code) {
Ben Murdoch85b71792012-04-11 18:30:58 +01003870 __ mov(ecx, Operand(edx));
3871 __ or_(ecx, Operand(eax));
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003872 __ test(ecx, Immediate(kSmiTagMask));
3873 __ Assert(not_zero, "Unexpected smi operands.");
3874 }
3875
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003876 // NOTICE! This code is only reached after a smi-fast-case check, so
3877 // it is certain that at least one operand isn't a smi.
3878
3879 // Identical objects can be compared fast, but there are some tricky cases
3880 // for NaN and undefined.
3881 {
3882 Label not_identical;
Ben Murdoch85b71792012-04-11 18:30:58 +01003883 __ cmp(eax, Operand(edx));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003884 __ j(not_equal, &not_identical);
3885
3886 if (cc_ != equal) {
3887 // Check for undefined. undefined OP undefined is false even though
3888 // undefined == undefined.
Ben Murdoch257744e2011-11-30 15:57:28 +00003889 Label check_for_nan;
Steve Block44f0eee2011-05-26 01:26:41 +01003890 __ cmp(edx, masm->isolate()->factory()->undefined_value());
Ben Murdoch257744e2011-11-30 15:57:28 +00003891 __ j(not_equal, &check_for_nan, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003892 __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
3893 __ ret(0);
3894 __ bind(&check_for_nan);
3895 }
3896
Steve Block44f0eee2011-05-26 01:26:41 +01003897 // Test for NaN. Sadly, we can't just compare to factory->nan_value(),
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003898 // so we do the second best thing - test it ourselves.
3899 // Note: if cc_ != equal, never_nan_nan_ is not used.
3900 if (never_nan_nan_ && (cc_ == equal)) {
3901 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
3902 __ ret(0);
3903 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00003904 Label heap_number;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003905 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
Steve Block44f0eee2011-05-26 01:26:41 +01003906 Immediate(masm->isolate()->factory()->heap_number_map()));
Ben Murdoch257744e2011-11-30 15:57:28 +00003907 __ j(equal, &heap_number, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003908 if (cc_ != equal) {
3909 // Call runtime on identical JSObjects. Otherwise return equal.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003910 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003911 __ j(above_equal, &not_identical);
3912 }
3913 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
3914 __ ret(0);
3915
3916 __ bind(&heap_number);
3917 // It is a heap number, so return non-equal if it's NaN and equal if
3918 // it's not NaN.
3919 // The representation of NaN values has all exponent bits (52..62) set,
3920 // and not all mantissa bits (0..51) clear.
3921 // We only accept QNaNs, which have bit 51 set.
3922 // Read top bits of double representation (second word of value).
3923
3924 // Value is a QNaN if value & kQuietNaNMask == kQuietNaNMask, i.e.,
3925 // all bits in the mask are set. We only need to check the word
3926 // that contains the exponent and high bit of the mantissa.
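      // For instance, assuming kQuietNaNHighBitsMask is 0x7ff80000 (exponent
      // bits plus the quiet bit in the high word), the canonical quiet NaN
      // high word 0x7ff80000 doubles to 0xfff00000 and compares above_equal,
      // while the +Infinity high word 0x7ff00000 doubles to 0xffe00000 and
      // compares below.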
3927 STATIC_ASSERT(((kQuietNaNHighBitsMask << 1) & 0x80000000u) != 0);
3928 __ mov(edx, FieldOperand(edx, HeapNumber::kExponentOffset));
Steve Block9fac8402011-05-12 15:51:54 +01003929 __ Set(eax, Immediate(0));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003930 // Shift value and mask so kQuietNaNHighBitsMask applies to topmost
3931 // bits.
Ben Murdoch85b71792012-04-11 18:30:58 +01003932 __ add(edx, Operand(edx));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003933 __ cmp(edx, kQuietNaNHighBitsMask << 1);
3934 if (cc_ == equal) {
3935 STATIC_ASSERT(EQUAL != 1);
3936 __ setcc(above_equal, eax);
3937 __ ret(0);
3938 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00003939 Label nan;
3940 __ j(above_equal, &nan, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003941 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
3942 __ ret(0);
3943 __ bind(&nan);
3944 __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
3945 __ ret(0);
3946 }
3947 }
3948
3949 __ bind(&not_identical);
3950 }
3951
3952 // Strict equality can quickly decide whether objects are equal.
3953 // Non-strict object equality is slower, so it is handled later in the stub.
3954 if (cc_ == equal && strict_) {
3955 Label slow; // Fallthrough label.
Ben Murdoch257744e2011-11-30 15:57:28 +00003956 Label not_smis;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003957 // If we're doing a strict equality comparison, we don't have to do
3958 // type conversion, so we generate code to do fast comparison for objects
3959 // and oddballs. Non-smi numbers and strings still go through the usual
3960 // slow-case code.
3961 // If either is a Smi (we know that not both are), then they can only
3962 // be equal if the other is a HeapNumber. If so, use the slow case.
3963 STATIC_ASSERT(kSmiTag == 0);
3964 ASSERT_EQ(0, Smi::FromInt(0));
3965 __ mov(ecx, Immediate(kSmiTagMask));
Ben Murdoch85b71792012-04-11 18:30:58 +01003966 __ and_(ecx, Operand(eax));
3967 __ test(ecx, Operand(edx));
Ben Murdoch257744e2011-11-30 15:57:28 +00003968 __ j(not_zero, &not_smis, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003969 // One operand is a smi.
3970
3971 // Check whether the non-smi is a heap number.
3972 STATIC_ASSERT(kSmiTagMask == 1);
3973 // ecx still holds eax & kSmiTag, which is either zero or one.
Ben Murdoch85b71792012-04-11 18:30:58 +01003974 __ sub(Operand(ecx), Immediate(0x01));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003975 __ mov(ebx, edx);
Ben Murdoch85b71792012-04-11 18:30:58 +01003976 __ xor_(ebx, Operand(eax));
3977 __ and_(ebx, Operand(ecx)); // ebx holds either 0 or eax ^ edx.
3978 __ xor_(ebx, Operand(eax));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003979 // if eax was smi, ebx is now edx, else eax.
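    // Branch-free select: with a zero smi tag, eax & kSmiTagMask is 0 for a
    // smi and 1 for a heap object, so after the sub above ecx is either all
    // ones (eax is the smi) or zero (eax is the heap object). Masking
    // eax ^ edx with ecx and xoring eax back in therefore yields edx in the
    // first case and eax in the second.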
3980
3981 // Check if the non-smi operand is a heap number.
3982 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
Steve Block44f0eee2011-05-26 01:26:41 +01003983 Immediate(masm->isolate()->factory()->heap_number_map()));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003984 // If heap number, handle it in the slow case.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003985 __ j(equal, &slow, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003986 // Return non-equal (ebx is not zero)
3987 __ mov(eax, ebx);
3988 __ ret(0);
3989
3990 __ bind(&not_smis);
3991 // If either operand is a JSObject or an oddball value, then they are not
3992 // equal since their pointers are different.
3993 // There is no test for undetectability in strict equality.
3994
3995 // Get the type of the first operand.
3996 // If the first object is a JS object, we have done pointer comparison.
Ben Murdoch257744e2011-11-30 15:57:28 +00003997 Label first_non_object;
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003998 STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
3999 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
Ben Murdoch257744e2011-11-30 15:57:28 +00004000 __ j(below, &first_non_object, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004001
4002 // Return non-zero (eax is not zero)
Ben Murdoch257744e2011-11-30 15:57:28 +00004003 Label return_not_equal;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004004 STATIC_ASSERT(kHeapObjectTag != 0);
4005 __ bind(&return_not_equal);
4006 __ ret(0);
4007
4008 __ bind(&first_non_object);
4009 // Check for oddballs: true, false, null, undefined.
4010 __ CmpInstanceType(ecx, ODDBALL_TYPE);
4011 __ j(equal, &return_not_equal);
4012
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004013 __ CmpObjectType(edx, FIRST_SPEC_OBJECT_TYPE, ecx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004014 __ j(above_equal, &return_not_equal);
4015
4016 // Check for oddballs: true, false, null, undefined.
4017 __ CmpInstanceType(ecx, ODDBALL_TYPE);
4018 __ j(equal, &return_not_equal);
4019
4020 // Fall through to the general case.
4021 __ bind(&slow);
4022 }
4023
4024 // Generate the number comparison code.
4025 if (include_number_compare_) {
4026 Label non_number_comparison;
4027 Label unordered;
Ben Murdoch8b112d22011-06-08 16:22:53 +01004028 if (CpuFeatures::IsSupported(SSE2)) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004029 CpuFeatures::Scope use_sse2(SSE2);
4030 CpuFeatures::Scope use_cmov(CMOV);
4031
4032 FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison);
4033 __ ucomisd(xmm0, xmm1);
4034
4035 // Don't base result on EFLAGS when a NaN is involved.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00004036 __ j(parity_even, &unordered, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004037 // Return a result of -1, 0, or 1, based on EFLAGS.
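      // Branchless materialization of the result: start from 0 (EQUAL) and
      // conditionally overwrite it with the smi 1 or -1; ucomisd sets the
      // flags like an unsigned compare, so 'above' and 'below' pick the
      // correct outcome and 'equal' leaves the 0 in place.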
4038 __ mov(eax, 0); // equal
4039 __ mov(ecx, Immediate(Smi::FromInt(1)));
Ben Murdoch85b71792012-04-11 18:30:58 +01004040 __ cmov(above, eax, Operand(ecx));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004041 __ mov(ecx, Immediate(Smi::FromInt(-1)));
Ben Murdoch85b71792012-04-11 18:30:58 +01004042 __ cmov(below, eax, Operand(ecx));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004043 __ ret(0);
4044 } else {
4045 FloatingPointHelper::CheckFloatOperands(
4046 masm, &non_number_comparison, ebx);
4047 FloatingPointHelper::LoadFloatOperand(masm, eax);
4048 FloatingPointHelper::LoadFloatOperand(masm, edx);
4049 __ FCmp();
4050
4051 // Don't base result on EFLAGS when a NaN is involved.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00004052 __ j(parity_even, &unordered, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004053
Ben Murdoch257744e2011-11-30 15:57:28 +00004054 Label below_label, above_label;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004055 // Return a result of -1, 0, or 1, based on EFLAGS.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00004056 __ j(below, &below_label, Label::kNear);
4057 __ j(above, &above_label, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004058
Steve Block9fac8402011-05-12 15:51:54 +01004059 __ Set(eax, Immediate(0));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004060 __ ret(0);
4061
4062 __ bind(&below_label);
4063 __ mov(eax, Immediate(Smi::FromInt(-1)));
4064 __ ret(0);
4065
4066 __ bind(&above_label);
4067 __ mov(eax, Immediate(Smi::FromInt(1)));
4068 __ ret(0);
4069 }
4070
4071 // If one of the numbers was NaN, then the result is always false.
4072 // The cc is never not-equal.
4073 __ bind(&unordered);
4074 ASSERT(cc_ != not_equal);
4075 if (cc_ == less || cc_ == less_equal) {
4076 __ mov(eax, Immediate(Smi::FromInt(1)));
4077 } else {
4078 __ mov(eax, Immediate(Smi::FromInt(-1)));
4079 }
4080 __ ret(0);
4081
4082 // The number comparison code did not provide a valid result.
4083 __ bind(&non_number_comparison);
4084 }
4085
4086 // Fast negative check for symbol-to-symbol equality.
4087 Label check_for_strings;
4088 if (cc_ == equal) {
4089 BranchIfNonSymbol(masm, &check_for_strings, eax, ecx);
4090 BranchIfNonSymbol(masm, &check_for_strings, edx, ecx);
4091
4092 // We've already checked for object identity, so if both operands
4093 // are symbols they aren't equal. Register eax already holds a
4094 // non-zero value, which indicates not equal, so just return.
4095 __ ret(0);
4096 }
4097
4098 __ bind(&check_for_strings);
4099
4100 __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx,
4101 &check_unequal_objects);
4102
Ben Murdoch85b71792012-04-11 18:30:58 +01004103 // Inline comparison of ASCII strings.
Ben Murdoch257744e2011-11-30 15:57:28 +00004104 if (cc_ == equal) {
4105 StringCompareStub::GenerateFlatAsciiStringEquals(masm,
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004106 edx,
4107 eax,
4108 ecx,
Ben Murdoch257744e2011-11-30 15:57:28 +00004109 ebx);
4110 } else {
4111 StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
4112 edx,
4113 eax,
4114 ecx,
4115 ebx,
4116 edi);
4117 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004118#ifdef DEBUG
4119 __ Abort("Unexpected fall-through from string comparison");
4120#endif
4121
4122 __ bind(&check_unequal_objects);
4123 if (cc_ == equal && !strict_) {
4124 // Non-strict equality. Objects are unequal if
4125 // they are both JSObjects and not undetectable,
4126 // and their pointers are different.
Ben Murdoch257744e2011-11-30 15:57:28 +00004127 Label not_both_objects;
4128 Label return_unequal;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004129 // At most one is a smi, so we can test for smi by adding the two.
4130 // A smi plus a heap object has the low bit set, a heap object plus
4131 // a heap object has the low bit clear.
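    // For instance, two heap objects contribute tag bits 1 + 1 = 2 (low bit
    // clear), while a smi contributes 0, so the low bit of the sum is set
    // exactly when one of the two operands is a smi.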
4132 STATIC_ASSERT(kSmiTag == 0);
4133 STATIC_ASSERT(kSmiTagMask == 1);
4134 __ lea(ecx, Operand(eax, edx, times_1, 0));
4135 __ test(ecx, Immediate(kSmiTagMask));
Ben Murdoch257744e2011-11-30 15:57:28 +00004136 __ j(not_zero, &not_both_objects, Label::kNear);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004137 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
Ben Murdoch257744e2011-11-30 15:57:28 +00004138 __ j(below, &not_both_objects, Label::kNear);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004139 __ CmpObjectType(edx, FIRST_SPEC_OBJECT_TYPE, ebx);
Ben Murdoch257744e2011-11-30 15:57:28 +00004140 __ j(below, &not_both_objects, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004141 // We do not bail out after this point. Both are JSObjects, and
4142 // they are equal if and only if both are undetectable.
4143 // The and of the undetectable flags is 1 if and only if they are equal.
4144 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
4145 1 << Map::kIsUndetectable);
Ben Murdoch257744e2011-11-30 15:57:28 +00004146 __ j(zero, &return_unequal, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004147 __ test_b(FieldOperand(ebx, Map::kBitFieldOffset),
4148 1 << Map::kIsUndetectable);
Ben Murdoch257744e2011-11-30 15:57:28 +00004149 __ j(zero, &return_unequal, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004150 // The objects are both undetectable, so they both compare as the value
4151 // undefined, and are equal.
4152 __ Set(eax, Immediate(EQUAL));
4153 __ bind(&return_unequal);
4154 // Return non-equal by returning the non-zero object pointer in eax,
4155 // or return equal if we fell through to here.
4156 __ ret(0); // rax, rdx were pushed
4157 __ bind(&not_both_objects);
4158 }
4159
4160 // Push arguments below the return address.
4161 __ pop(ecx);
4162 __ push(edx);
4163 __ push(eax);
4164
4165 // Figure out which native to call and setup the arguments.
4166 Builtins::JavaScript builtin;
4167 if (cc_ == equal) {
4168 builtin = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
4169 } else {
4170 builtin = Builtins::COMPARE;
4171 __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
4172 }
4173
4174 // Restore return address on the stack.
4175 __ push(ecx);
4176
4177 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
4178 // tagged as a small integer.
4179 __ InvokeBuiltin(builtin, JUMP_FUNCTION);
4180}
4181
4182
4183void CompareStub::BranchIfNonSymbol(MacroAssembler* masm,
4184 Label* label,
4185 Register object,
4186 Register scratch) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004187 __ JumpIfSmi(object, label);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004188 __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
4189 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
4190 __ and_(scratch, kIsSymbolMask | kIsNotStringMask);
4191 __ cmp(scratch, kSymbolTag | kStringTag);
4192 __ j(not_equal, label);
4193}
4194
4195
4196void StackCheckStub::Generate(MacroAssembler* masm) {
Ben Murdochf87a2032010-10-22 12:50:53 +01004197 __ TailCallRuntime(Runtime::kStackGuard, 0, 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004198}
4199
4200
4201void CallFunctionStub::Generate(MacroAssembler* masm) {
Ben Murdoch589d6972011-11-30 16:04:58 +00004202 Label slow, non_function;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004203
Ben Murdoch257744e2011-11-30 15:57:28 +00004204 // The receiver might implicitly be the global object. This is
4205 // indicated by passing the hole as the receiver to the call
4206 // function stub.
4207 if (ReceiverMightBeImplicit()) {
Ben Murdoch85b71792012-04-11 18:30:58 +01004208 Label call;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004209 // Get the receiver from the stack.
4210 // +1 ~ return address
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004211 __ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize));
Ben Murdoch257744e2011-11-30 15:57:28 +00004212 // Call as function is indicated with the hole.
Ben Murdoch85b71792012-04-11 18:30:58 +01004213 __ cmp(eax, masm->isolate()->factory()->the_hole_value());
4214 __ j(not_equal, &call, Label::kNear);
Ben Murdoch257744e2011-11-30 15:57:28 +00004215 // Patch the receiver on the stack with the global receiver object.
Ben Murdoch85b71792012-04-11 18:30:58 +01004216 __ mov(ebx, GlobalObjectOperand());
4217 __ mov(ebx, FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));
4218 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), ebx);
4219 __ bind(&call);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004220 }
4221
Ben Murdoch85b71792012-04-11 18:30:58 +01004222 // Get the function to call from the stack.
4223 // +2 ~ receiver, return address
4224 __ mov(edi, Operand(esp, (argc_ + 2) * kPointerSize));
4225
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004226 // Check that the function really is a JavaScript function.
Ben Murdoch589d6972011-11-30 16:04:58 +00004227 __ JumpIfSmi(edi, &non_function);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004228 // Goto slow case if we do not have a function.
4229 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
Ben Murdoch257744e2011-11-30 15:57:28 +00004230 __ j(not_equal, &slow);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004231
4232 // Fast-case: Just invoke the function.
4233 ParameterCount actual(argc_);
Ben Murdoch257744e2011-11-30 15:57:28 +00004234
4235 if (ReceiverMightBeImplicit()) {
4236 Label call_as_function;
Ben Murdoch85b71792012-04-11 18:30:58 +01004237 __ cmp(eax, masm->isolate()->factory()->the_hole_value());
Ben Murdoch257744e2011-11-30 15:57:28 +00004238 __ j(equal, &call_as_function);
4239 __ InvokeFunction(edi,
4240 actual,
4241 JUMP_FUNCTION,
4242 NullCallWrapper(),
4243 CALL_AS_METHOD);
4244 __ bind(&call_as_function);
4245 }
4246 __ InvokeFunction(edi,
4247 actual,
4248 JUMP_FUNCTION,
4249 NullCallWrapper(),
4250 CALL_AS_FUNCTION);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004251
4252 // Slow-case: Non-function called.
4253 __ bind(&slow);
Ben Murdoch589d6972011-11-30 16:04:58 +00004254 // Check for function proxy.
4255 __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
4256 __ j(not_equal, &non_function);
4257 __ pop(ecx);
4258 __ push(edi); // put proxy as additional argument under return address
4259 __ push(ecx);
4260 __ Set(eax, Immediate(argc_ + 1));
4261 __ Set(ebx, Immediate(0));
4262 __ SetCallKind(ecx, CALL_AS_FUNCTION);
4263 __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
4264 {
Ben Murdoch85b71792012-04-11 18:30:58 +01004265 Handle<Code> adaptor =
4266 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
Ben Murdoch589d6972011-11-30 16:04:58 +00004267 __ jmp(adaptor, RelocInfo::CODE_TARGET);
4268 }
4269
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004270 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
4271 // of the original receiver from the call site).
Ben Murdoch589d6972011-11-30 16:04:58 +00004272 __ bind(&non_function);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004273 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi);
4274 __ Set(eax, Immediate(argc_));
4275 __ Set(ebx, Immediate(0));
Ben Murdoch589d6972011-11-30 16:04:58 +00004276 __ SetCallKind(ecx, CALL_AS_METHOD);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004277 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
Ben Murdoch85b71792012-04-11 18:30:58 +01004278 Handle<Code> adaptor =
Ben Murdoch5d4cdbf2012-04-11 10:23:59 +01004279 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
Ben Murdoch85b71792012-04-11 18:30:58 +01004280 __ jmp(adaptor, RelocInfo::CODE_TARGET);
Ben Murdoch5d4cdbf2012-04-11 10:23:59 +01004281}
4282
4283
Steve Block44f0eee2011-05-26 01:26:41 +01004284bool CEntryStub::NeedsImmovableCode() {
4285 return false;
4286}
4287
4288
Ben Murdoch85b71792012-04-11 18:30:58 +01004289void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
4290 __ Throw(eax);
Ben Murdoch592a9fc2012-03-05 11:04:45 +00004291}
4292
4293
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004294void CEntryStub::GenerateCore(MacroAssembler* masm,
4295 Label* throw_normal_exception,
4296 Label* throw_termination_exception,
4297 Label* throw_out_of_memory_exception,
4298 bool do_gc,
Steve Block1e0659c2011-05-24 12:43:12 +01004299 bool always_allocate_scope) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004300 // eax: result parameter for PerformGC, if any
4301 // ebx: pointer to C function (C callee-saved)
4302 // ebp: frame pointer (restored after C call)
4303 // esp: stack pointer (restored after C call)
4304 // edi: number of arguments including receiver (C callee-saved)
4305 // esi: pointer to the first argument (C callee-saved)
4306
4307 // Result returned in eax, or eax+edx if result_size_ is 2.
4308
4309 // Check stack alignment.
4310 if (FLAG_debug_code) {
4311 __ CheckStackAlignment();
4312 }
4313
4314 if (do_gc) {
4315 // Pass failure code returned from last attempt as first argument to
4316 // PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the
4317 // stack alignment is known to be correct. This function takes one argument
4318 // which is passed on the stack, and we know that the stack has been
4319 // prepared to pass at least one argument.
4320 __ mov(Operand(esp, 0 * kPointerSize), eax); // Result.
4321 __ call(FUNCTION_ADDR(Runtime::PerformGC), RelocInfo::RUNTIME_ENTRY);
4322 }
4323
4324 ExternalReference scope_depth =
Steve Block44f0eee2011-05-26 01:26:41 +01004325 ExternalReference::heap_always_allocate_scope_depth(masm->isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004326 if (always_allocate_scope) {
4327 __ inc(Operand::StaticVariable(scope_depth));
4328 }
4329
4330 // Call C function.
4331 __ mov(Operand(esp, 0 * kPointerSize), edi); // argc.
4332 __ mov(Operand(esp, 1 * kPointerSize), esi); // argv.
Steve Block44f0eee2011-05-26 01:26:41 +01004333 __ mov(Operand(esp, 2 * kPointerSize),
4334 Immediate(ExternalReference::isolate_address()));
Ben Murdoch85b71792012-04-11 18:30:58 +01004335 __ call(Operand(ebx));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004336 // Result is in eax or edx:eax - do not destroy these registers!
4337
4338 if (always_allocate_scope) {
4339 __ dec(Operand::StaticVariable(scope_depth));
4340 }
4341
4342 // Make sure we're not trying to return 'the hole' from the runtime
4343 // call as this may lead to crashes in the IC code later.
4344 if (FLAG_debug_code) {
Ben Murdoch257744e2011-11-30 15:57:28 +00004345 Label okay;
Steve Block44f0eee2011-05-26 01:26:41 +01004346 __ cmp(eax, masm->isolate()->factory()->the_hole_value());
Ben Murdoch257744e2011-11-30 15:57:28 +00004347 __ j(not_equal, &okay, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004348 __ int3();
4349 __ bind(&okay);
4350 }
4351
4352 // Check for failure result.
4353 Label failure_returned;
4354 STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
4355 __ lea(ecx, Operand(eax, 1));
4356 // Lower 2 bits of ecx are 0 iff eax has failure tag.
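  // Failure objects carry kFailureTag in their low bits, and the
  // STATIC_ASSERT above guarantees that adding 1 carries out of that tag
  // field, so the low two bits of ecx end up clear exactly for failures.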
4357 __ test(ecx, Immediate(kFailureTagMask));
Ben Murdoch257744e2011-11-30 15:57:28 +00004358 __ j(zero, &failure_returned);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004359
Steve Block44f0eee2011-05-26 01:26:41 +01004360 ExternalReference pending_exception_address(
Ben Murdoch589d6972011-11-30 16:04:58 +00004361 Isolate::kPendingExceptionAddress, masm->isolate());
Steve Block1e0659c2011-05-24 12:43:12 +01004362
4363 // Check that there is no pending exception, otherwise we
4364 // should have returned some failure value.
4365 if (FLAG_debug_code) {
4366 __ push(edx);
Ben Murdoch85b71792012-04-11 18:30:58 +01004367 __ mov(edx, Operand::StaticVariable(
4368 ExternalReference::the_hole_value_location(masm->isolate())));
Ben Murdoch257744e2011-11-30 15:57:28 +00004369 Label okay;
Steve Block1e0659c2011-05-24 12:43:12 +01004370 __ cmp(edx, Operand::StaticVariable(pending_exception_address));
4371 // Cannot use check here as it attempts to generate call into runtime.
Ben Murdoch257744e2011-11-30 15:57:28 +00004372 __ j(equal, &okay, Label::kNear);
Steve Block1e0659c2011-05-24 12:43:12 +01004373 __ int3();
4374 __ bind(&okay);
4375 __ pop(edx);
4376 }
4377
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004378 // Exit the JavaScript to C++ exit frame.
Ben Murdoch85b71792012-04-11 18:30:58 +01004379 __ LeaveExitFrame(save_doubles_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004380 __ ret(0);
4381
4382 // Handling of failure.
4383 __ bind(&failure_returned);
4384
4385 Label retry;
4386 // If the returned exception is RETRY_AFTER_GC continue at retry label
4387 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
4388 __ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00004389 __ j(zero, &retry, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004390
4391 // Special handling of out of memory exceptions.
4392 __ cmp(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
4393 __ j(equal, throw_out_of_memory_exception);
4394
4395 // Retrieve the pending exception and clear the variable.
Ben Murdoch85b71792012-04-11 18:30:58 +01004396 ExternalReference the_hole_location =
4397 ExternalReference::the_hole_value_location(masm->isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004398 __ mov(eax, Operand::StaticVariable(pending_exception_address));
Ben Murdoch85b71792012-04-11 18:30:58 +01004399 __ mov(edx, Operand::StaticVariable(the_hole_location));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004400 __ mov(Operand::StaticVariable(pending_exception_address), edx);
4401
4402 // Special handling of termination exceptions which are uncatchable
4403 // by javascript code.
Steve Block44f0eee2011-05-26 01:26:41 +01004404 __ cmp(eax, masm->isolate()->factory()->termination_exception());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004405 __ j(equal, throw_termination_exception);
4406
4407 // Handle normal exception.
4408 __ jmp(throw_normal_exception);
4409
4410 // Retry.
4411 __ bind(&retry);
4412}
4413
4414
Ben Murdoch85b71792012-04-11 18:30:58 +01004415void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
4416 UncatchableExceptionType type) {
4417 __ ThrowUncatchable(type, eax);
4418}
4419
4420
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004421void CEntryStub::Generate(MacroAssembler* masm) {
4422 // eax: number of arguments including receiver
4423 // ebx: pointer to C function (C callee-saved)
4424 // ebp: frame pointer (restored after C call)
4425 // esp: stack pointer (restored after C call)
4426 // esi: current context (C callee-saved)
4427 // edi: JS function of the caller (C callee-saved)
4428
4429 // NOTE: Invocations of builtins may return failure objects instead
4430 // of a proper result. The builtin entry handles this by performing
4431 // a garbage collection and retrying the builtin (twice).
4432
4433 // Enter the exit frame that transitions from JavaScript to C++.
Ben Murdoch85b71792012-04-11 18:30:58 +01004434 __ EnterExitFrame(save_doubles_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004435
4436 // eax: result parameter for PerformGC, if any (setup below)
4437 // ebx: pointer to builtin function (C callee-saved)
4438 // ebp: frame pointer (restored after C call)
4439 // esp: stack pointer (restored after C call)
4440 // edi: number of arguments including receiver (C callee-saved)
4441 // esi: argv pointer (C callee-saved)
4442
4443 Label throw_normal_exception;
4444 Label throw_termination_exception;
4445 Label throw_out_of_memory_exception;
4446
4447 // Call into the runtime system.
4448 GenerateCore(masm,
4449 &throw_normal_exception,
4450 &throw_termination_exception,
4451 &throw_out_of_memory_exception,
4452 false,
4453 false);
4454
4455 // Do space-specific GC and retry runtime call.
4456 GenerateCore(masm,
4457 &throw_normal_exception,
4458 &throw_termination_exception,
4459 &throw_out_of_memory_exception,
4460 true,
4461 false);
4462
4463 // Do full GC and retry runtime call one final time.
4464 Failure* failure = Failure::InternalError();
4465 __ mov(eax, Immediate(reinterpret_cast<int32_t>(failure)));
4466 GenerateCore(masm,
4467 &throw_normal_exception,
4468 &throw_termination_exception,
4469 &throw_out_of_memory_exception,
4470 true,
4471 true);
4472
4473 __ bind(&throw_out_of_memory_exception);
Ben Murdoch85b71792012-04-11 18:30:58 +01004474 GenerateThrowUncatchable(masm, OUT_OF_MEMORY);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004475
4476 __ bind(&throw_termination_exception);
Ben Murdoch85b71792012-04-11 18:30:58 +01004477 GenerateThrowUncatchable(masm, TERMINATION);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004478
4479 __ bind(&throw_normal_exception);
Ben Murdoch85b71792012-04-11 18:30:58 +01004480 GenerateThrowTOS(masm);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004481}
4482
4483
4484void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
Ben Murdoch85b71792012-04-11 18:30:58 +01004485 Label invoke, exit;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004486 Label not_outermost_js, not_outermost_js_2;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004487
Ben Murdoch85b71792012-04-11 18:30:58 +01004488 // Set up the frame.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004489 __ push(ebp);
Ben Murdoch85b71792012-04-11 18:30:58 +01004490 __ mov(ebp, Operand(esp));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004491
4492 // Push marker in two places.
4493 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
4494 __ push(Immediate(Smi::FromInt(marker))); // context slot
4495 __ push(Immediate(Smi::FromInt(marker))); // function slot
4496 // Save callee-saved registers (C calling conventions).
4497 __ push(edi);
4498 __ push(esi);
4499 __ push(ebx);
4500
4501 // Save copies of the top frame descriptor on the stack.
Ben Murdoch589d6972011-11-30 16:04:58 +00004502 ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, masm->isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004503 __ push(Operand::StaticVariable(c_entry_fp));
4504
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004505 // If this is the outermost JS call, set js_entry_sp value.
Ben Murdoch589d6972011-11-30 16:04:58 +00004506 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress,
Steve Block44f0eee2011-05-26 01:26:41 +01004507 masm->isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004508 __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00004509 __ j(not_equal, &not_outermost_js, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004510 __ mov(Operand::StaticVariable(js_entry_sp), ebp);
Steve Block053d10c2011-06-13 19:13:29 +01004511 __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
4512 Label cont;
Ben Murdoch69a99ed2011-11-30 16:03:39 +00004513 __ jmp(&cont, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004514 __ bind(&not_outermost_js);
Steve Block053d10c2011-06-13 19:13:29 +01004515 __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
4516 __ bind(&cont);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004517
Ben Murdoch85b71792012-04-11 18:30:58 +01004518 // Call a faked try-block that does the invoke.
4519 __ call(&invoke);
4520
4521 // Caught exception: Store result (exception) in the pending
4522 // exception field in the JSEnv and return a failure sentinel.
Ben Murdoch589d6972011-11-30 16:04:58 +00004523 ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
Steve Block44f0eee2011-05-26 01:26:41 +01004524 masm->isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004525 __ mov(Operand::StaticVariable(pending_exception), eax);
4526 __ mov(eax, reinterpret_cast<int32_t>(Failure::Exception()));
4527 __ jmp(&exit);
4528
Ben Murdoch85b71792012-04-11 18:30:58 +01004529 // Invoke: Link this frame into the handler chain.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004530 __ bind(&invoke);
Ben Murdoch85b71792012-04-11 18:30:58 +01004531 __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004532
4533 // Clear any pending exceptions.
Ben Murdoch85b71792012-04-11 18:30:58 +01004534 ExternalReference the_hole_location =
4535 ExternalReference::the_hole_value_location(masm->isolate());
4536 __ mov(edx, Operand::StaticVariable(the_hole_location));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004537 __ mov(Operand::StaticVariable(pending_exception), edx);
4538
4539 // Fake a receiver (NULL).
4540 __ push(Immediate(0)); // receiver
4541
Ben Murdoch85b71792012-04-11 18:30:58 +01004542 // Invoke the function by calling through the JS entry trampoline
4543 // builtin and pop the faked function when we return. Notice that we
4544 // cannot store a reference to the trampoline code directly in this
4545 // stub, because the builtin stubs may not have been generated yet.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004546 if (is_construct) {
Ben Murdoch85b71792012-04-11 18:30:58 +01004547 ExternalReference construct_entry(
4548 Builtins::kJSConstructEntryTrampoline,
4549 masm->isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004550 __ mov(edx, Immediate(construct_entry));
4551 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01004552 ExternalReference entry(Builtins::kJSEntryTrampoline,
4553 masm->isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004554 __ mov(edx, Immediate(entry));
4555 }
4556 __ mov(edx, Operand(edx, 0)); // deref address
4557 __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
Ben Murdoch85b71792012-04-11 18:30:58 +01004558 __ call(Operand(edx));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004559
4560 // Unlink this frame from the handler chain.
Steve Block053d10c2011-06-13 19:13:29 +01004561 __ PopTryHandler();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004562
Steve Block053d10c2011-06-13 19:13:29 +01004563 __ bind(&exit);
Steve Block053d10c2011-06-13 19:13:29 +01004564 // Check if the current stack frame is marked as the outermost JS frame.
4565 __ pop(ebx);
Ben Murdoch85b71792012-04-11 18:30:58 +01004566 __ cmp(Operand(ebx),
4567 Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004568 __ j(not_equal, &not_outermost_js_2);
4569 __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
4570 __ bind(&not_outermost_js_2);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004571
4572 // Restore the top frame descriptor from the stack.
Steve Block44f0eee2011-05-26 01:26:41 +01004573 __ pop(Operand::StaticVariable(ExternalReference(
Ben Murdoch589d6972011-11-30 16:04:58 +00004574 Isolate::kCEntryFPAddress,
Steve Block44f0eee2011-05-26 01:26:41 +01004575 masm->isolate())));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004576
4577 // Restore callee-saved registers (C calling conventions).
4578 __ pop(ebx);
4579 __ pop(esi);
4580 __ pop(edi);
Ben Murdoch85b71792012-04-11 18:30:58 +01004581 __ add(Operand(esp), Immediate(2 * kPointerSize)); // remove markers
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004582
4583 // Restore frame pointer and return.
4584 __ pop(ebp);
4585 __ ret(0);
4586}
4587
4588
Ben Murdoch086aeea2011-05-13 15:57:08 +01004589// Generate stub code for instanceof.
4590// This code can patch a call site inlined cache of the instanceof check,
4591// which looks like this.
4592//
4593// 81 ff XX XX XX XX cmp edi, <the hole, patched to a map>
4594// 75 0a jne <some near label>
4595// b8 XX XX XX XX mov eax, <the hole, patched to either true or false>
4596//
4597// If call site patching is requested the stack will have the delta from the
4598// return address to the cmp instruction just below the return address. This
4599// also means that call site patching can only take place with arguments in
4600// registers. TOS looks like this when call site patching is requested
4601//
4602// esp[0] : return address
4603// esp[4] : delta from return address to cmp instruction
4604//
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004605void InstanceofStub::Generate(MacroAssembler* masm) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01004606 // Call site inlining and patching implies arguments in registers.
4607 ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck());
4608
Ben Murdochb0fe1622011-05-05 13:52:32 +01004609 // Fixed register usage throughout the stub.
4610 Register object = eax; // Object (lhs).
4611 Register map = ebx; // Map of the object.
4612 Register function = edx; // Function (rhs).
4613 Register prototype = edi; // Prototype of the function.
4614 Register scratch = ecx;
4615
Ben Murdoch086aeea2011-05-13 15:57:08 +01004616 // Constants describing the call site code to patch.
4617 static const int kDeltaToCmpImmediate = 2;
4618 static const int kDeltaToMov = 8;
4619 static const int kDeltaToMovImmediate = 9;
Ben Murdoch85b71792012-04-11 18:30:58 +01004620 static const int8_t kCmpEdiImmediateByte1 = BitCast<int8_t, uint8_t>(0x81);
4621 static const int8_t kCmpEdiImmediateByte2 = BitCast<int8_t, uint8_t>(0xff);
Ben Murdoch086aeea2011-05-13 15:57:08 +01004622 static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8);
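  // These deltas follow from the encoding sketched in the comment above the
  // stub: the cmp is '81 ff imm32' (two opcode/ModRM bytes, so its immediate
  // starts at offset 2 and the instruction is 6 bytes long), the 'jne' adds
  // 2 more bytes, which puts the mov opcode at offset 8 and its imm32 at 9.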
4623
Ben Murdoch85b71792012-04-11 18:30:58 +01004624 ExternalReference roots_address =
4625 ExternalReference::roots_address(masm->isolate());
Ben Murdoch086aeea2011-05-13 15:57:08 +01004626
4627 ASSERT_EQ(object.code(), InstanceofStub::left().code());
4628 ASSERT_EQ(function.code(), InstanceofStub::right().code());
4629
Ben Murdochb0fe1622011-05-05 13:52:32 +01004630 // Get the object and function - they are always both needed.
4631 Label slow, not_js_object;
Ben Murdoch086aeea2011-05-13 15:57:08 +01004632 if (!HasArgsInRegisters()) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01004633 __ mov(object, Operand(esp, 2 * kPointerSize));
4634 __ mov(function, Operand(esp, 1 * kPointerSize));
4635 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004636
4637 // Check that the left hand is a JS object.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004638 __ JumpIfSmi(object, &not_js_object);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004639 __ IsObjectJSObjectType(object, map, scratch, &not_js_object);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004640
Ben Murdoch086aeea2011-05-13 15:57:08 +01004641 // If there is a call site cache don't look in the global cache, but do the
4642 // real lookup and update the call site cache.
4643 if (!HasCallSiteInlineCheck()) {
4644 // Look up the function and the map in the instanceof cache.
Ben Murdoch257744e2011-11-30 15:57:28 +00004645 Label miss;
Ben Murdoch086aeea2011-05-13 15:57:08 +01004646 __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
Ben Murdoch85b71792012-04-11 18:30:58 +01004647 __ cmp(function,
4648 Operand::StaticArray(scratch, times_pointer_size, roots_address));
Ben Murdoch257744e2011-11-30 15:57:28 +00004649 __ j(not_equal, &miss, Label::kNear);
Ben Murdoch086aeea2011-05-13 15:57:08 +01004650 __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex));
4651 __ cmp(map, Operand::StaticArray(
Ben Murdoch85b71792012-04-11 18:30:58 +01004652 scratch, times_pointer_size, roots_address));
Ben Murdoch257744e2011-11-30 15:57:28 +00004653 __ j(not_equal, &miss, Label::kNear);
Ben Murdoch086aeea2011-05-13 15:57:08 +01004654 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
4655 __ mov(eax, Operand::StaticArray(
Ben Murdoch85b71792012-04-11 18:30:58 +01004656 scratch, times_pointer_size, roots_address));
Ben Murdoch086aeea2011-05-13 15:57:08 +01004657 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
4658 __ bind(&miss);
4659 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004660
Ben Murdochb0fe1622011-05-05 13:52:32 +01004661 // Get the prototype of the function.
Ben Murdoch85b71792012-04-11 18:30:58 +01004662 __ TryGetFunctionPrototype(function, prototype, scratch, &slow);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004663
4664 // Check that the function prototype is a JS object.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004665 __ JumpIfSmi(prototype, &slow);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004666 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004667
Ben Murdoch086aeea2011-05-13 15:57:08 +01004668 // Update the global instanceof or call site inlined cache with the current
4669 // map and function. The cached answer will be set when it is known below.
4670 if (!HasCallSiteInlineCheck()) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01004671 __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex));
Ben Murdoch85b71792012-04-11 18:30:58 +01004672 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), map);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004673 __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
Ben Murdoch85b71792012-04-11 18:30:58 +01004674 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address),
Ben Murdochb0fe1622011-05-05 13:52:32 +01004675 function);
Ben Murdoch086aeea2011-05-13 15:57:08 +01004676 } else {
4677 // The constants for the code patching are based on no push instructions
4678 // at the call site.
4679 ASSERT(HasArgsInRegisters());
4680 // Get return address and delta to inlined map check.
4681 __ mov(scratch, Operand(esp, 0 * kPointerSize));
4682 __ sub(scratch, Operand(esp, 1 * kPointerSize));
4683 if (FLAG_debug_code) {
Ben Murdoch85b71792012-04-11 18:30:58 +01004684 __ cmpb(Operand(scratch, 0), kCmpEdiImmediateByte1);
Ben Murdoch086aeea2011-05-13 15:57:08 +01004685 __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 1)");
Ben Murdoch85b71792012-04-11 18:30:58 +01004686 __ cmpb(Operand(scratch, 1), kCmpEdiImmediateByte2);
Ben Murdoch086aeea2011-05-13 15:57:08 +01004687 __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 2)");
4688 }
Ben Murdoch85b71792012-04-11 18:30:58 +01004689 __ mov(Operand(scratch, kDeltaToCmpImmediate), map);
Ben Murdoch086aeea2011-05-13 15:57:08 +01004690 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004691
Ben Murdochb0fe1622011-05-05 13:52:32 +01004692 // Loop through the prototype chain of the object looking for the function
4693 // prototype.
4694 __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00004695 Label loop, is_instance, is_not_instance;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004696 __ bind(&loop);
Ben Murdoch85b71792012-04-11 18:30:58 +01004697 __ cmp(scratch, Operand(prototype));
Ben Murdoch257744e2011-11-30 15:57:28 +00004698 __ j(equal, &is_instance, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01004699 Factory* factory = masm->isolate()->factory();
Ben Murdoch85b71792012-04-11 18:30:58 +01004700 __ cmp(Operand(scratch), Immediate(factory->null_value()));
Ben Murdoch257744e2011-11-30 15:57:28 +00004701 __ j(equal, &is_not_instance, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004702 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
4703 __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004704 __ jmp(&loop);
4705
4706 __ bind(&is_instance);
Ben Murdoch086aeea2011-05-13 15:57:08 +01004707 if (!HasCallSiteInlineCheck()) {
4708 __ Set(eax, Immediate(0));
4709 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
4710 __ mov(Operand::StaticArray(scratch,
Ben Murdoch85b71792012-04-11 18:30:58 +01004711 times_pointer_size, roots_address), eax);
Ben Murdoch086aeea2011-05-13 15:57:08 +01004712 } else {
4713 // Get return address and delta to inlined map check.
Steve Block44f0eee2011-05-26 01:26:41 +01004714 __ mov(eax, factory->true_value());
Ben Murdoch086aeea2011-05-13 15:57:08 +01004715 __ mov(scratch, Operand(esp, 0 * kPointerSize));
4716 __ sub(scratch, Operand(esp, 1 * kPointerSize));
4717 if (FLAG_debug_code) {
4718 __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
4719 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
4720 }
4721 __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
4722 if (!ReturnTrueFalseObject()) {
4723 __ Set(eax, Immediate(0));
4724 }
4725 }
4726 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004727
4728 __ bind(&is_not_instance);
Ben Murdoch086aeea2011-05-13 15:57:08 +01004729 if (!HasCallSiteInlineCheck()) {
4730 __ Set(eax, Immediate(Smi::FromInt(1)));
4731 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
4732 __ mov(Operand::StaticArray(
Ben Murdoch85b71792012-04-11 18:30:58 +01004733 scratch, times_pointer_size, roots_address), eax);
Ben Murdoch086aeea2011-05-13 15:57:08 +01004734 } else {
4735 // Get return address and delta to inlined map check.
Steve Block44f0eee2011-05-26 01:26:41 +01004736 __ mov(eax, factory->false_value());
Ben Murdoch086aeea2011-05-13 15:57:08 +01004737 __ mov(scratch, Operand(esp, 0 * kPointerSize));
4738 __ sub(scratch, Operand(esp, 1 * kPointerSize));
4739 if (FLAG_debug_code) {
4740 __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
4741 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
4742 }
4743 __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
4744 if (!ReturnTrueFalseObject()) {
4745 __ Set(eax, Immediate(Smi::FromInt(1)));
4746 }
4747 }
4748 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004749
4750 Label object_not_null, object_not_null_or_smi;
4751 __ bind(&not_js_object);
4752 // Before the null, smi and string value checks, check that the rhs is a
4753 // function, as an exception needs to be thrown for a non-function rhs.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00004754 __ JumpIfSmi(function, &slow, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004755 __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00004756 __ j(not_equal, &slow, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004757
4758 // Null is not instance of anything.
Steve Block44f0eee2011-05-26 01:26:41 +01004759 __ cmp(object, factory->null_value());
Ben Murdoch69a99ed2011-11-30 16:03:39 +00004760 __ j(not_equal, &object_not_null, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004761 __ Set(eax, Immediate(Smi::FromInt(1)));
Ben Murdoch086aeea2011-05-13 15:57:08 +01004762 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004763
4764 __ bind(&object_not_null);
4765 // Smi values are not instances of anything.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00004766 __ JumpIfNotSmi(object, &object_not_null_or_smi, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004767 __ Set(eax, Immediate(Smi::FromInt(1)));
Ben Murdoch086aeea2011-05-13 15:57:08 +01004768 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004769
4770 __ bind(&object_not_null_or_smi);
4771 // String values are not instances of anything.
4772 Condition is_string = masm->IsObjectStringType(object, scratch, scratch);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00004773 __ j(NegateCondition(is_string), &slow, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004774 __ Set(eax, Immediate(Smi::FromInt(1)));
Ben Murdoch086aeea2011-05-13 15:57:08 +01004775 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004776
4777 // Slow-case: Go through the JavaScript implementation.
4778 __ bind(&slow);
Ben Murdoch086aeea2011-05-13 15:57:08 +01004779 if (!ReturnTrueFalseObject()) {
4780 // Tail call the builtin which returns 0 or 1.
4781 if (HasArgsInRegisters()) {
4782 // Push arguments below return address.
4783 __ pop(scratch);
4784 __ push(object);
4785 __ push(function);
4786 __ push(scratch);
4787 }
4788 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
4789 } else {
4790 // Call the builtin and convert 0/1 to true/false.
Ben Murdoch85b71792012-04-11 18:30:58 +01004791 __ EnterInternalFrame();
4792 __ push(object);
4793 __ push(function);
4794 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
4795 __ LeaveInternalFrame();
Ben Murdoch257744e2011-11-30 15:57:28 +00004796 Label true_value, done;
Ben Murdoch85b71792012-04-11 18:30:58 +01004797 __ test(eax, Operand(eax));
Ben Murdoch257744e2011-11-30 15:57:28 +00004798 __ j(zero, &true_value, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01004799 __ mov(eax, factory->false_value());
Ben Murdoch257744e2011-11-30 15:57:28 +00004800 __ jmp(&done, Label::kNear);
Ben Murdoch086aeea2011-05-13 15:57:08 +01004801 __ bind(&true_value);
Steve Block44f0eee2011-05-26 01:26:41 +01004802 __ mov(eax, factory->true_value());
Ben Murdoch086aeea2011-05-13 15:57:08 +01004803 __ bind(&done);
4804 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004805 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004806}
4807
4808
Ben Murdoch086aeea2011-05-13 15:57:08 +01004809Register InstanceofStub::left() { return eax; }
4810
4811
4812Register InstanceofStub::right() { return edx; }
4813
4814
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004815int CompareStub::MinorKey() {
4816 // Encode the three parameters in a unique 16 bit value. To avoid duplicate
4817 // stubs the never NaN NaN condition is only taken into account if the
4818 // condition is equals.
4819 ASSERT(static_cast<unsigned>(cc_) < (1 << 12));
4820 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
4821 return ConditionField::encode(static_cast<unsigned>(cc_))
4822 | RegisterField::encode(false) // lhs_ and rhs_ are not used
4823 | StrictField::encode(strict_)
4824 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false)
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004825 | IncludeNumberCompareField::encode(include_number_compare_)
4826 | IncludeSmiCompareField::encode(include_smi_compare_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004827}
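// In effect the minor key is a bit-field of the form
//   ConditionField | RegisterField | StrictField | NeverNanNanField |
//   IncludeNumberCompareField | IncludeSmiCompareField
// so two compare stubs share generated code only when all of these agree,
// e.g. strict and non-strict equality compares differ in a single bit.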
4828
4829
4830// Unfortunately you have to run without snapshots to see most of these
4831// names in the profile since most compare stubs end up in the snapshot.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004832void CompareStub::PrintName(StringStream* stream) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004833 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004834 const char* cc_name;
4835 switch (cc_) {
4836 case less: cc_name = "LT"; break;
4837 case greater: cc_name = "GT"; break;
4838 case less_equal: cc_name = "LE"; break;
4839 case greater_equal: cc_name = "GE"; break;
4840 case equal: cc_name = "EQ"; break;
4841 case not_equal: cc_name = "NE"; break;
4842 default: cc_name = "UnknownCondition"; break;
4843 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004844 bool is_equality = cc_ == equal || cc_ == not_equal;
4845 stream->Add("CompareStub_%s", cc_name);
4846 if (strict_ && is_equality) stream->Add("_STRICT");
4847 if (never_nan_nan_ && is_equality) stream->Add("_NO_NAN");
4848 if (!include_number_compare_) stream->Add("_NO_NUMBER");
4849 if (!include_smi_compare_) stream->Add("_NO_SMI");
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004850}
4851
4852
4853// -------------------------------------------------------------------------
4854// StringCharCodeAtGenerator
4855
4856void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
Ben Murdoch85b71792012-04-11 18:30:58 +01004857 Label flat_string;
4858 Label ascii_string;
4859 Label got_char_code;
4860 Label sliced_string;
4861
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004862 // If the receiver is a smi trigger the non-string case.
4863 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004864 __ JumpIfSmi(object_, receiver_not_string_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004865
4866 // Fetch the instance type of the receiver into result register.
4867 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
4868 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
4869 // If the receiver is not a string trigger the non-string case.
4870 __ test(result_, Immediate(kIsNotStringMask));
4871 __ j(not_zero, receiver_not_string_);
4872
4873 // If the index is non-smi trigger the non-smi case.
4874 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004875 __ JumpIfNotSmi(index_, &index_not_smi_);
Ben Murdoch85b71792012-04-11 18:30:58 +01004876
4877 // Put smi-tagged index into scratch register.
4878 __ mov(scratch_, index_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004879 __ bind(&got_smi_index_);
4880
4881 // Check for index out of range.
Ben Murdoch85b71792012-04-11 18:30:58 +01004882 __ cmp(scratch_, FieldOperand(object_, String::kLengthOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004883 __ j(above_equal, index_out_of_range_);
4884
Ben Murdoch85b71792012-04-11 18:30:58 +01004885 // We need special handling for non-flat strings.
4886 STATIC_ASSERT(kSeqStringTag == 0);
4887 __ test(result_, Immediate(kStringRepresentationMask));
4888 __ j(zero, &flat_string);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004889
Ben Murdoch85b71792012-04-11 18:30:58 +01004890 // Handle non-flat strings.
4891 __ and_(result_, kStringRepresentationMask);
4892 STATIC_ASSERT(kConsStringTag < kExternalStringTag);
4893 STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
4894 __ cmp(result_, kExternalStringTag);
4895 __ j(greater, &sliced_string, Label::kNear);
4896 __ j(equal, &call_runtime_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004897
Ben Murdoch85b71792012-04-11 18:30:58 +01004898 // ConsString.
4899 // Check whether the right hand side is the empty string (i.e. if
4900 // this is really a flat string in a cons string). If that is not
4901 // the case we would rather go to the runtime system now to flatten
4902 // the string.
4903 Label assure_seq_string;
4904 __ cmp(FieldOperand(object_, ConsString::kSecondOffset),
4905 Immediate(masm->isolate()->factory()->empty_string()));
4906 __ j(not_equal, &call_runtime_);
4907 // Get the first of the two strings and load its instance type.
4908 __ mov(object_, FieldOperand(object_, ConsString::kFirstOffset));
4909 __ jmp(&assure_seq_string, Label::kNear);
4910
4911 // SlicedString, unpack and add offset.
4912 __ bind(&sliced_string);
4913 __ add(scratch_, FieldOperand(object_, SlicedString::kOffsetOffset));
4914 __ mov(object_, FieldOperand(object_, SlicedString::kParentOffset));
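  // A sliced string is only a (parent, offset, length) view into another
  // string, so character i of the slice lives at index offset + i of the
  // parent. Both values are smis here, so adding the offset field to the
  // smi-tagged index keeps it valid for the parent just loaded into object_.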
4915
4916 // Assure that we are dealing with a sequential string. Go to runtime if not.
4917 __ bind(&assure_seq_string);
4918 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
4919 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
4920 STATIC_ASSERT(kSeqStringTag == 0);
4921 __ test(result_, Immediate(kStringRepresentationMask));
4922 __ j(not_zero, &call_runtime_);
4923 __ jmp(&flat_string, Label::kNear);
4924
4925 // Check for 1-byte or 2-byte string.
4926 __ bind(&flat_string);
4927 STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
4928 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
4929 __ test(result_, Immediate(kStringEncodingMask));
4930 __ j(not_zero, &ascii_string, Label::kNear);
4931
4932 // 2-byte string.
4933 // Load the 2-byte character code into the result register.
4934 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
4935 __ movzx_w(result_, FieldOperand(object_,
4936 scratch_, times_1, // Scratch is smi-tagged.
4937 SeqTwoByteString::kHeaderSize));
4938 __ jmp(&got_char_code, Label::kNear);
4939
4940 // ASCII string.
4941 // Load the byte into the result register.
4942 __ bind(&ascii_string);
4943 __ SmiUntag(scratch_);
4944 __ movzx_b(result_, FieldOperand(object_,
4945 scratch_, times_1,
4946 SeqAsciiString::kHeaderSize));
4947 __ bind(&got_char_code);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004948 __ SmiTag(result_);
4949 __ bind(&exit_);
4950}
4951
4952
4953void StringCharCodeAtGenerator::GenerateSlow(
Ben Murdoch85b71792012-04-11 18:30:58 +01004954 MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004955 __ Abort("Unexpected fallthrough to CharCodeAt slow case");
4956
4957 // Index is not a smi.
4958 __ bind(&index_not_smi_);
4959 // If index is a heap number, try converting it to an integer.
Steve Block44f0eee2011-05-26 01:26:41 +01004960 __ CheckMap(index_,
4961 masm->isolate()->factory()->heap_number_map(),
4962 index_not_number_,
Ben Murdoch257744e2011-11-30 15:57:28 +00004963 DONT_DO_SMI_CHECK);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004964 call_helper.BeforeCall(masm);
4965 __ push(object_);
Ben Murdoch85b71792012-04-11 18:30:58 +01004966 __ push(index_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004967 __ push(index_); // Consumed by runtime conversion function.
4968 if (index_flags_ == STRING_INDEX_IS_NUMBER) {
4969 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
4970 } else {
4971 ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
4972 // NumberToSmi discards numbers that are not exact integers.
4973 __ CallRuntime(Runtime::kNumberToSmi, 1);
4974 }
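  // The two runtime calls differ in leniency: kNumberToIntegerMapMinusZero
  // truncates a heap-number index to an integer (e.g. 2.7 becomes 2), while
  // kNumberToSmi only accepts exact integer values and discards the rest,
  // which then fails the smi check below and ends up out of range.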
Ben Murdoch85b71792012-04-11 18:30:58 +01004975 if (!scratch_.is(eax)) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004976 // Save the conversion result before the pop instructions below
4977 // have a chance to overwrite it.
Ben Murdoch85b71792012-04-11 18:30:58 +01004978 __ mov(scratch_, eax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004979 }
Ben Murdoch85b71792012-04-11 18:30:58 +01004980 __ pop(index_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004981 __ pop(object_);
4982 // Reload the instance type.
4983 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
4984 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
4985 call_helper.AfterCall(masm);
4986 // If index is still not a smi, it must be out of range.
4987 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch85b71792012-04-11 18:30:58 +01004988 __ JumpIfNotSmi(scratch_, index_out_of_range_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004989 // Otherwise, return to the fast path.
4990 __ jmp(&got_smi_index_);
4991
4992 // Call runtime. We get here when the receiver is a string and the
4993  // index is a number, but the code for getting the actual character
4994 // is too complex (e.g., when the string needs to be flattened).
4995 __ bind(&call_runtime_);
4996 call_helper.BeforeCall(masm);
4997 __ push(object_);
4998 __ push(index_);
4999 __ CallRuntime(Runtime::kStringCharCodeAt, 2);
5000 if (!result_.is(eax)) {
5001 __ mov(result_, eax);
5002 }
5003 call_helper.AfterCall(masm);
5004 __ jmp(&exit_);
5005
5006 __ Abort("Unexpected fallthrough from CharCodeAt slow case");
5007}
5008
5009
5010// -------------------------------------------------------------------------
5011// StringCharFromCodeGenerator
5012
5013void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
5014 // Fast case of Heap::LookupSingleCharacterStringFromCode.
5015 STATIC_ASSERT(kSmiTag == 0);
5016 STATIC_ASSERT(kSmiShiftSize == 0);
5017 ASSERT(IsPowerOf2(String::kMaxAsciiCharCode + 1));
5018 __ test(code_,
5019 Immediate(kSmiTagMask |
5020 ((~String::kMaxAsciiCharCode) << kSmiTagSize)));
Ben Murdoch257744e2011-11-30 15:57:28 +00005021 __ j(not_zero, &slow_case_);
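  // This single test covers both requirements at once: a set smi-tag bit or
  // any bit above String::kMaxAsciiCharCode makes the masked value non-zero,
  // so only smi char codes in [0, kMaxAsciiCharCode] stay on the fast path.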
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005022
Steve Block44f0eee2011-05-26 01:26:41 +01005023 Factory* factory = masm->isolate()->factory();
5024 __ Set(result_, Immediate(factory->single_character_string_cache()));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005025 STATIC_ASSERT(kSmiTag == 0);
5026 STATIC_ASSERT(kSmiTagSize == 1);
5027 STATIC_ASSERT(kSmiShiftSize == 0);
Ben Murdoch85b71792012-04-11 18:30:58 +01005028  // At this point the code register contains a smi-tagged ascii char code.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005029 __ mov(result_, FieldOperand(result_,
5030 code_, times_half_pointer_size,
5031 FixedArray::kHeaderSize));
Steve Block44f0eee2011-05-26 01:26:41 +01005032 __ cmp(result_, factory->undefined_value());
Ben Murdoch257744e2011-11-30 15:57:28 +00005033 __ j(equal, &slow_case_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005034 __ bind(&exit_);
5035}
5036
5037
5038void StringCharFromCodeGenerator::GenerateSlow(
Ben Murdoch85b71792012-04-11 18:30:58 +01005039 MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005040 __ Abort("Unexpected fallthrough to CharFromCode slow case");
5041
5042 __ bind(&slow_case_);
5043 call_helper.BeforeCall(masm);
5044 __ push(code_);
5045 __ CallRuntime(Runtime::kCharFromCode, 1);
5046 if (!result_.is(eax)) {
5047 __ mov(result_, eax);
5048 }
5049 call_helper.AfterCall(masm);
5050 __ jmp(&exit_);
5051
5052 __ Abort("Unexpected fallthrough from CharFromCode slow case");
5053}
5054
5055
5056// -------------------------------------------------------------------------
5057// StringCharAtGenerator
5058
5059void StringCharAtGenerator::GenerateFast(MacroAssembler* masm) {
5060 char_code_at_generator_.GenerateFast(masm);
5061 char_from_code_generator_.GenerateFast(masm);
5062}
5063
5064
5065void StringCharAtGenerator::GenerateSlow(
Ben Murdoch85b71792012-04-11 18:30:58 +01005066 MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005067 char_code_at_generator_.GenerateSlow(masm, call_helper);
5068 char_from_code_generator_.GenerateSlow(masm, call_helper);
5069}
5070
5071
5072void StringAddStub::Generate(MacroAssembler* masm) {
Ben Murdoch85b71792012-04-11 18:30:58 +01005073 Label string_add_runtime, call_builtin;
Iain Merrick9ac36c92010-09-13 15:29:50 +01005074 Builtins::JavaScript builtin_id = Builtins::ADD;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005075
5076 // Load the two arguments.
5077 __ mov(eax, Operand(esp, 2 * kPointerSize)); // First argument.
5078 __ mov(edx, Operand(esp, 1 * kPointerSize)); // Second argument.
5079
5080 // Make sure that both arguments are strings if not known in advance.
Iain Merrick9ac36c92010-09-13 15:29:50 +01005081 if (flags_ == NO_STRING_ADD_FLAGS) {
Ben Murdoch85b71792012-04-11 18:30:58 +01005082 __ JumpIfSmi(eax, &string_add_runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005083 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, ebx);
Ben Murdoch85b71792012-04-11 18:30:58 +01005084 __ j(above_equal, &string_add_runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005085
5086    // First argument is a string, test second.
Ben Murdoch85b71792012-04-11 18:30:58 +01005087 __ JumpIfSmi(edx, &string_add_runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005088 __ CmpObjectType(edx, FIRST_NONSTRING_TYPE, ebx);
Ben Murdoch85b71792012-04-11 18:30:58 +01005089 __ j(above_equal, &string_add_runtime);
Iain Merrick9ac36c92010-09-13 15:29:50 +01005090 } else {
5091 // Here at least one of the arguments is definitely a string.
5092 // We convert the one that is not known to be a string.
5093 if ((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) == 0) {
5094 ASSERT((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) != 0);
5095 GenerateConvertArgument(masm, 2 * kPointerSize, eax, ebx, ecx, edi,
5096 &call_builtin);
5097 builtin_id = Builtins::STRING_ADD_RIGHT;
5098 } else if ((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) == 0) {
5099 ASSERT((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) != 0);
5100 GenerateConvertArgument(masm, 1 * kPointerSize, edx, ebx, ecx, edi,
5101 &call_builtin);
5102 builtin_id = Builtins::STRING_ADD_LEFT;
5103 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005104 }
5105
5106 // Both arguments are strings.
5107 // eax: first string
5108 // edx: second string
5109  // Check if either of the strings is empty. In that case return the other.
Ben Murdoch257744e2011-11-30 15:57:28 +00005110 Label second_not_zero_length, both_not_zero_length;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005111 __ mov(ecx, FieldOperand(edx, String::kLengthOffset));
5112 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch85b71792012-04-11 18:30:58 +01005113 __ test(ecx, Operand(ecx));
Ben Murdoch257744e2011-11-30 15:57:28 +00005114 __ j(not_zero, &second_not_zero_length, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005115 // Second string is empty, result is first string which is already in eax.
Steve Block44f0eee2011-05-26 01:26:41 +01005116 Counters* counters = masm->isolate()->counters();
5117 __ IncrementCounter(counters->string_add_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005118 __ ret(2 * kPointerSize);
5119 __ bind(&second_not_zero_length);
5120 __ mov(ebx, FieldOperand(eax, String::kLengthOffset));
5121 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch85b71792012-04-11 18:30:58 +01005122 __ test(ebx, Operand(ebx));
Ben Murdoch257744e2011-11-30 15:57:28 +00005123 __ j(not_zero, &both_not_zero_length, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005124 // First string is empty, result is second string which is in edx.
5125 __ mov(eax, edx);
Steve Block44f0eee2011-05-26 01:26:41 +01005126 __ IncrementCounter(counters->string_add_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005127 __ ret(2 * kPointerSize);
5128
5129 // Both strings are non-empty.
5130 // eax: first string
5131 // ebx: length of first string as a smi
5132 // ecx: length of second string as a smi
5133 // edx: second string
5134 // Look at the length of the result of adding the two strings.
5135 Label string_add_flat_result, longer_than_two;
5136 __ bind(&both_not_zero_length);
Ben Murdoch85b71792012-04-11 18:30:58 +01005137 __ add(ebx, Operand(ecx));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005138 STATIC_ASSERT(Smi::kMaxValue == String::kMaxLength);
5139 // Handle exceptionally long strings in the runtime system.
Ben Murdoch85b71792012-04-11 18:30:58 +01005140 __ j(overflow, &string_add_runtime);
Steve Block44f0eee2011-05-26 01:26:41 +01005141 // Use the symbol table when adding two one character strings, as it
5142 // helps later optimizations to return a symbol here.
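  // E.g. for "f" + "o" the probe below can return an already existing
  // symbol "fo" instead of allocating a fresh two character string, so
  // later comparisons against that result can use identity.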
Ben Murdoch85b71792012-04-11 18:30:58 +01005143 __ cmp(Operand(ebx), Immediate(Smi::FromInt(2)));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005144 __ j(not_equal, &longer_than_two);
5145
Ben Murdoch85b71792012-04-11 18:30:58 +01005146 // Check that both strings are non-external ascii strings.
5147 __ JumpIfNotBothSequentialAsciiStrings(eax, edx, ebx, ecx,
5148 &string_add_runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005149
Iain Merrick9ac36c92010-09-13 15:29:50 +01005150 // Get the two characters forming the new string.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005151 __ movzx_b(ebx, FieldOperand(eax, SeqAsciiString::kHeaderSize));
5152 __ movzx_b(ecx, FieldOperand(edx, SeqAsciiString::kHeaderSize));
5153
5154 // Try to lookup two character string in symbol table. If it is not found
5155 // just allocate a new one.
Iain Merrick9ac36c92010-09-13 15:29:50 +01005156 Label make_two_character_string, make_two_character_string_no_reload;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005157 StringHelper::GenerateTwoCharacterSymbolTableProbe(
Iain Merrick9ac36c92010-09-13 15:29:50 +01005158 masm, ebx, ecx, eax, edx, edi,
5159 &make_two_character_string_no_reload, &make_two_character_string);
Steve Block44f0eee2011-05-26 01:26:41 +01005160 __ IncrementCounter(counters->string_add_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005161 __ ret(2 * kPointerSize);
5162
Iain Merrick9ac36c92010-09-13 15:29:50 +01005163 // Allocate a two character string.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005164 __ bind(&make_two_character_string);
Iain Merrick9ac36c92010-09-13 15:29:50 +01005165 // Reload the arguments.
5166 __ mov(eax, Operand(esp, 2 * kPointerSize)); // First argument.
5167 __ mov(edx, Operand(esp, 1 * kPointerSize)); // Second argument.
5168 // Get the two characters forming the new string.
5169 __ movzx_b(ebx, FieldOperand(eax, SeqAsciiString::kHeaderSize));
5170 __ movzx_b(ecx, FieldOperand(edx, SeqAsciiString::kHeaderSize));
5171 __ bind(&make_two_character_string_no_reload);
Steve Block44f0eee2011-05-26 01:26:41 +01005172 __ IncrementCounter(counters->string_add_make_two_char(), 1);
Ben Murdoch85b71792012-04-11 18:30:58 +01005173 __ AllocateAsciiString(eax, // Result.
5174 2, // Length.
5175 edi, // Scratch 1.
5176 edx, // Scratch 2.
5177 &string_add_runtime);
Iain Merrick9ac36c92010-09-13 15:29:50 +01005178 // Pack both characters in ebx.
5179 __ shl(ecx, kBitsPerByte);
Ben Murdoch85b71792012-04-11 18:30:58 +01005180 __ or_(ebx, Operand(ecx));
Iain Merrick9ac36c92010-09-13 15:29:50 +01005181 // Set the characters in the new string.
5182 __ mov_w(FieldOperand(eax, SeqAsciiString::kHeaderSize), ebx);
Steve Block44f0eee2011-05-26 01:26:41 +01005183 __ IncrementCounter(counters->string_add_native(), 1);
Iain Merrick9ac36c92010-09-13 15:29:50 +01005184 __ ret(2 * kPointerSize);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005185
5186 __ bind(&longer_than_two);
5187 // Check if resulting string will be flat.
Ben Murdoch85b71792012-04-11 18:30:58 +01005188 __ cmp(Operand(ebx), Immediate(Smi::FromInt(String::kMinNonFlatLength)));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005189 __ j(below, &string_add_flat_result);
5190
5191  // If the result is not supposed to be flat, allocate a cons string object.
Ben Murdoch85b71792012-04-11 18:30:58 +01005192  // If both strings are ascii the result is an ascii cons string.
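  // A cons string only stores the two halves plus the combined length (the
  // first/second fields are filled in below), so no characters are copied
  // here and actual flattening is deferred until it is needed.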
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005193 Label non_ascii, allocated, ascii_data;
5194 __ mov(edi, FieldOperand(eax, HeapObject::kMapOffset));
5195 __ movzx_b(ecx, FieldOperand(edi, Map::kInstanceTypeOffset));
5196 __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
5197 __ movzx_b(edi, FieldOperand(edi, Map::kInstanceTypeOffset));
Ben Murdoch85b71792012-04-11 18:30:58 +01005198 __ and_(ecx, Operand(edi));
Ben Murdoch589d6972011-11-30 16:04:58 +00005199 STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
5200 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
5201 __ test(ecx, Immediate(kStringEncodingMask));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005202 __ j(zero, &non_ascii);
5203 __ bind(&ascii_data);
Ben Murdoch85b71792012-04-11 18:30:58 +01005204  // Allocate an ascii cons string.
5205 __ AllocateAsciiConsString(ecx, edi, no_reg, &string_add_runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005206 __ bind(&allocated);
5207 // Fill the fields of the cons string.
5208 if (FLAG_debug_code) __ AbortIfNotSmi(ebx);
5209 __ mov(FieldOperand(ecx, ConsString::kLengthOffset), ebx);
5210 __ mov(FieldOperand(ecx, ConsString::kHashFieldOffset),
5211 Immediate(String::kEmptyHashField));
5212 __ mov(FieldOperand(ecx, ConsString::kFirstOffset), eax);
5213 __ mov(FieldOperand(ecx, ConsString::kSecondOffset), edx);
5214 __ mov(eax, ecx);
Steve Block44f0eee2011-05-26 01:26:41 +01005215 __ IncrementCounter(counters->string_add_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005216 __ ret(2 * kPointerSize);
5217 __ bind(&non_ascii);
5218 // At least one of the strings is two-byte. Check whether it happens
Ben Murdoch85b71792012-04-11 18:30:58 +01005219 // to contain only ascii characters.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005220 // ecx: first instance type AND second instance type.
5221 // edi: second instance type.
5222 __ test(ecx, Immediate(kAsciiDataHintMask));
5223 __ j(not_zero, &ascii_data);
5224 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
5225 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
Ben Murdoch85b71792012-04-11 18:30:58 +01005226 __ xor_(edi, Operand(ecx));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005227 STATIC_ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0);
5228 __ and_(edi, kAsciiStringTag | kAsciiDataHintTag);
5229 __ cmp(edi, kAsciiStringTag | kAsciiDataHintTag);
5230 __ j(equal, &ascii_data);
5231 // Allocate a two byte cons string.
Ben Murdoch85b71792012-04-11 18:30:58 +01005232 __ AllocateTwoByteConsString(ecx, edi, no_reg, &string_add_runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005233 __ jmp(&allocated);
5234
Ben Murdoch85b71792012-04-11 18:30:58 +01005235 // Handle creating a flat result. First check that both strings are not
5236 // external strings.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005237 // eax: first string
5238 // ebx: length of resulting flat string as a smi
5239 // edx: second string
5240 __ bind(&string_add_flat_result);
5241 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
5242 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
Ben Murdoch85b71792012-04-11 18:30:58 +01005243 __ and_(ecx, kStringRepresentationMask);
5244 __ cmp(ecx, kExternalStringTag);
5245 __ j(equal, &string_add_runtime);
5246 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
5247 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
5248 __ and_(ecx, kStringRepresentationMask);
5249 __ cmp(ecx, kExternalStringTag);
5250 __ j(equal, &string_add_runtime);
5251 // We cannot encounter sliced strings here since:
5252 STATIC_ASSERT(SlicedString::kMinLength >= String::kMinNonFlatLength);
5253 // Now check if both strings are ascii strings.
5254 // eax: first string
5255 // ebx: length of resulting flat string as a smi
5256 // edx: second string
5257 Label non_ascii_string_add_flat_result;
5258 STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
5259 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
5260 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
5261 __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kStringEncodingMask);
Ben Murdochc7cc0282012-03-05 14:35:55 +00005262 __ j(zero, &non_ascii_string_add_flat_result);
Ben Murdoch85b71792012-04-11 18:30:58 +01005263 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
5264 __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kStringEncodingMask);
5265 __ j(zero, &string_add_runtime);
Ben Murdochc7cc0282012-03-05 14:35:55 +00005266
Ben Murdoch85b71792012-04-11 18:30:58 +01005267 // Both strings are ascii strings. As they are short they are both flat.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005268 // ebx: length of resulting flat string as a smi
5269 __ SmiUntag(ebx);
Ben Murdoch85b71792012-04-11 18:30:58 +01005270 __ AllocateAsciiString(eax, ebx, ecx, edx, edi, &string_add_runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005271 // eax: result string
5272 __ mov(ecx, eax);
5273 // Locate first character of result.
Ben Murdoch85b71792012-04-11 18:30:58 +01005274 __ add(Operand(ecx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
5275 // Load first argument and locate first character.
5276 __ mov(edx, Operand(esp, 2 * kPointerSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005277 __ mov(edi, FieldOperand(edx, String::kLengthOffset));
5278 __ SmiUntag(edi);
Ben Murdoch85b71792012-04-11 18:30:58 +01005279 __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005280 // eax: result string
5281 // ecx: first character of result
5282 // edx: first char of first argument
5283 // edi: length of first argument
5284 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
Ben Murdoch85b71792012-04-11 18:30:58 +01005285 // Load second argument and locate first character.
5286 __ mov(edx, Operand(esp, 1 * kPointerSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005287 __ mov(edi, FieldOperand(edx, String::kLengthOffset));
5288 __ SmiUntag(edi);
Ben Murdoch85b71792012-04-11 18:30:58 +01005289 __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005290 // eax: result string
5291 // ecx: next character of result
5292 // edx: first char of second argument
5293 // edi: length of second argument
5294 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
Steve Block44f0eee2011-05-26 01:26:41 +01005295 __ IncrementCounter(counters->string_add_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005296 __ ret(2 * kPointerSize);
5297
5298 // Handle creating a flat two byte result.
5299 // eax: first string - known to be two byte
5300 // ebx: length of resulting flat string as a smi
5301 // edx: second string
5302 __ bind(&non_ascii_string_add_flat_result);
Ben Murdoch85b71792012-04-11 18:30:58 +01005303 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
5304 __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kStringEncodingMask);
5305 __ j(not_zero, &string_add_runtime);
5306 // Both strings are two byte strings. As they are short they are both
5307 // flat.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005308 __ SmiUntag(ebx);
Ben Murdoch85b71792012-04-11 18:30:58 +01005309 __ AllocateTwoByteString(eax, ebx, ecx, edx, edi, &string_add_runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005310 // eax: result string
5311 __ mov(ecx, eax);
5312 // Locate first character of result.
Ben Murdoch85b71792012-04-11 18:30:58 +01005313 __ add(Operand(ecx),
5314 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
5315 // Load first argument and locate first character.
5316 __ mov(edx, Operand(esp, 2 * kPointerSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005317 __ mov(edi, FieldOperand(edx, String::kLengthOffset));
5318 __ SmiUntag(edi);
Ben Murdoch85b71792012-04-11 18:30:58 +01005319 __ add(Operand(edx),
5320 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005321 // eax: result string
5322 // ecx: first character of result
5323 // edx: first char of first argument
5324 // edi: length of first argument
5325 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
Ben Murdoch85b71792012-04-11 18:30:58 +01005326 // Load second argument and locate first character.
5327 __ mov(edx, Operand(esp, 1 * kPointerSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005328 __ mov(edi, FieldOperand(edx, String::kLengthOffset));
5329 __ SmiUntag(edi);
Ben Murdoch85b71792012-04-11 18:30:58 +01005330 __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005331 // eax: result string
5332 // ecx: next character of result
5333 // edx: first char of second argument
5334 // edi: length of second argument
5335 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
Steve Block44f0eee2011-05-26 01:26:41 +01005336 __ IncrementCounter(counters->string_add_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005337 __ ret(2 * kPointerSize);
5338
5339 // Just jump to runtime to add the two strings.
Ben Murdoch85b71792012-04-11 18:30:58 +01005340 __ bind(&string_add_runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005341 __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
Iain Merrick9ac36c92010-09-13 15:29:50 +01005342
5343 if (call_builtin.is_linked()) {
5344 __ bind(&call_builtin);
5345 __ InvokeBuiltin(builtin_id, JUMP_FUNCTION);
5346 }
5347}
5348
5349
5350void StringAddStub::GenerateConvertArgument(MacroAssembler* masm,
5351 int stack_offset,
5352 Register arg,
5353 Register scratch1,
5354 Register scratch2,
5355 Register scratch3,
5356 Label* slow) {
5357 // First check if the argument is already a string.
5358 Label not_string, done;
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00005359 __ JumpIfSmi(arg, &not_string);
Iain Merrick9ac36c92010-09-13 15:29:50 +01005360 __ CmpObjectType(arg, FIRST_NONSTRING_TYPE, scratch1);
5361 __ j(below, &done);
5362
5363 // Check the number to string cache.
5364 Label not_cached;
5365 __ bind(&not_string);
5366 // Puts the cached result into scratch1.
5367 NumberToStringStub::GenerateLookupNumberStringCache(masm,
5368 arg,
5369 scratch1,
5370 scratch2,
5371 scratch3,
5372 false,
5373 &not_cached);
5374 __ mov(arg, scratch1);
5375 __ mov(Operand(esp, stack_offset), arg);
5376 __ jmp(&done);
5377
5378 // Check if the argument is a safe string wrapper.
5379 __ bind(&not_cached);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00005380 __ JumpIfSmi(arg, slow);
Iain Merrick9ac36c92010-09-13 15:29:50 +01005381 __ CmpObjectType(arg, JS_VALUE_TYPE, scratch1); // map -> scratch1.
5382 __ j(not_equal, slow);
5383 __ test_b(FieldOperand(scratch1, Map::kBitField2Offset),
5384 1 << Map::kStringWrapperSafeForDefaultValueOf);
5385 __ j(zero, slow);
5386 __ mov(arg, FieldOperand(arg, JSValue::kValueOffset));
5387 __ mov(Operand(esp, stack_offset), arg);
5388
5389 __ bind(&done);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005390}
5391
5392
5393void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
5394 Register dest,
5395 Register src,
5396 Register count,
5397 Register scratch,
5398 bool ascii) {
Ben Murdoch257744e2011-11-30 15:57:28 +00005399 Label loop;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005400 __ bind(&loop);
5401 // This loop just copies one character at a time, as it is only used for very
5402 // short strings.
5403 if (ascii) {
5404 __ mov_b(scratch, Operand(src, 0));
5405 __ mov_b(Operand(dest, 0), scratch);
Ben Murdoch85b71792012-04-11 18:30:58 +01005406 __ add(Operand(src), Immediate(1));
5407 __ add(Operand(dest), Immediate(1));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005408 } else {
5409 __ mov_w(scratch, Operand(src, 0));
5410 __ mov_w(Operand(dest, 0), scratch);
Ben Murdoch85b71792012-04-11 18:30:58 +01005411 __ add(Operand(src), Immediate(2));
5412 __ add(Operand(dest), Immediate(2));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005413 }
Ben Murdoch85b71792012-04-11 18:30:58 +01005414 __ sub(Operand(count), Immediate(1));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005415 __ j(not_zero, &loop);
5416}
5417
5418
5419void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
5420 Register dest,
5421 Register src,
5422 Register count,
5423 Register scratch,
5424 bool ascii) {
5425 // Copy characters using rep movs of doublewords.
5426 // The destination is aligned on a 4 byte boundary because we are
5427 // copying to the beginning of a newly allocated string.
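  // Roughly:
  //   byte_count = ascii ? count : count * 2;
  //   copy byte_count / 4 doublewords with rep movs;
  //   copy the remaining byte_count % 4 bytes in a byte loop.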
5428 ASSERT(dest.is(edi)); // rep movs destination
5429 ASSERT(src.is(esi)); // rep movs source
5430 ASSERT(count.is(ecx)); // rep movs count
5431 ASSERT(!scratch.is(dest));
5432 ASSERT(!scratch.is(src));
5433 ASSERT(!scratch.is(count));
5434
5435 // Nothing to do for zero characters.
5436 Label done;
Ben Murdoch85b71792012-04-11 18:30:58 +01005437 __ test(count, Operand(count));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005438 __ j(zero, &done);
5439
5440 // Make count the number of bytes to copy.
5441 if (!ascii) {
5442 __ shl(count, 1);
5443 }
5444
5445  // Don't enter the rep movs if there are fewer than 4 bytes to copy.
Ben Murdoch257744e2011-11-30 15:57:28 +00005446 Label last_bytes;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005447 __ test(count, Immediate(~3));
Ben Murdoch257744e2011-11-30 15:57:28 +00005448 __ j(zero, &last_bytes, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005449
5450 // Copy from edi to esi using rep movs instruction.
5451 __ mov(scratch, count);
5452 __ sar(count, 2); // Number of doublewords to copy.
5453 __ cld();
5454 __ rep_movs();
5455
5456 // Find number of bytes left.
5457 __ mov(count, scratch);
5458 __ and_(count, 3);
5459
5460 // Check if there are more bytes to copy.
5461 __ bind(&last_bytes);
Ben Murdoch85b71792012-04-11 18:30:58 +01005462 __ test(count, Operand(count));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005463 __ j(zero, &done);
5464
5465 // Copy remaining characters.
Ben Murdoch257744e2011-11-30 15:57:28 +00005466 Label loop;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005467 __ bind(&loop);
5468 __ mov_b(scratch, Operand(src, 0));
5469 __ mov_b(Operand(dest, 0), scratch);
Ben Murdoch85b71792012-04-11 18:30:58 +01005470 __ add(Operand(src), Immediate(1));
5471 __ add(Operand(dest), Immediate(1));
5472 __ sub(Operand(count), Immediate(1));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005473 __ j(not_zero, &loop);
5474
5475 __ bind(&done);
5476}
5477
5478
5479void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
5480 Register c1,
5481 Register c2,
5482 Register scratch1,
5483 Register scratch2,
5484 Register scratch3,
Iain Merrick9ac36c92010-09-13 15:29:50 +01005485 Label* not_probed,
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005486 Label* not_found) {
5487 // Register scratch3 is the general scratch register in this function.
5488 Register scratch = scratch3;
5489
5490  // Make sure that both characters are not digits as such strings have a
5491 // different hash algorithm. Don't try to look for these in the symbol table.
Ben Murdoch257744e2011-11-30 15:57:28 +00005492 Label not_array_index;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005493 __ mov(scratch, c1);
Ben Murdoch85b71792012-04-11 18:30:58 +01005494 __ sub(Operand(scratch), Immediate(static_cast<int>('0')));
5495 __ cmp(Operand(scratch), Immediate(static_cast<int>('9' - '0')));
Ben Murdoch257744e2011-11-30 15:57:28 +00005496 __ j(above, &not_array_index, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005497 __ mov(scratch, c2);
Ben Murdoch85b71792012-04-11 18:30:58 +01005498 __ sub(Operand(scratch), Immediate(static_cast<int>('0')));
5499 __ cmp(Operand(scratch), Immediate(static_cast<int>('9' - '0')));
Iain Merrick9ac36c92010-09-13 15:29:50 +01005500 __ j(below_equal, not_probed);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005501
5502 __ bind(&not_array_index);
5503 // Calculate the two character string hash.
5504 Register hash = scratch1;
5505 GenerateHashInit(masm, hash, c1, scratch);
5506 GenerateHashAddCharacter(masm, hash, c2, scratch);
5507 GenerateHashGetHash(masm, hash, scratch);
5508
5509 // Collect the two characters in a register.
5510 Register chars = c1;
5511 __ shl(c2, kBitsPerByte);
Ben Murdoch85b71792012-04-11 18:30:58 +01005512 __ or_(chars, Operand(c2));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005513
5514 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
5515 // hash: hash of two character string.
5516
5517 // Load the symbol table.
5518 Register symbol_table = c2;
Ben Murdoch85b71792012-04-11 18:30:58 +01005519 ExternalReference roots_address =
5520 ExternalReference::roots_address(masm->isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005521 __ mov(scratch, Immediate(Heap::kSymbolTableRootIndex));
5522 __ mov(symbol_table,
Ben Murdoch85b71792012-04-11 18:30:58 +01005523 Operand::StaticArray(scratch, times_pointer_size, roots_address));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005524
5525 // Calculate capacity mask from the symbol table capacity.
5526 Register mask = scratch2;
5527 __ mov(mask, FieldOperand(symbol_table, SymbolTable::kCapacityOffset));
5528 __ SmiUntag(mask);
Ben Murdoch85b71792012-04-11 18:30:58 +01005529 __ sub(Operand(mask), Immediate(1));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005530
5531 // Registers
5532 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
5533 // hash: hash of two character string
5534 // symbol_table: symbol table
5535 // mask: capacity mask
5536 // scratch: -
5537
5538 // Perform a number of probes in the symbol table.
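  // Each probe looks at entry ((hash + SymbolTable::GetProbeOffset(i)) & mask)
  // of the symbol table, i.e. a bounded open-addressing lookup; if all
  // kProbes entries miss, the code gives up and jumps to not_found instead
  // of scanning the whole table.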
5539 static const int kProbes = 4;
5540 Label found_in_symbol_table;
5541 Label next_probe[kProbes], next_probe_pop_mask[kProbes];
Ben Murdoch692be652012-01-10 18:47:50 +00005542 Register candidate = scratch; // Scratch register contains candidate.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005543 for (int i = 0; i < kProbes; i++) {
5544 // Calculate entry in symbol table.
5545 __ mov(scratch, hash);
5546 if (i > 0) {
Ben Murdoch85b71792012-04-11 18:30:58 +01005547 __ add(Operand(scratch), Immediate(SymbolTable::GetProbeOffset(i)));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005548 }
Ben Murdoch85b71792012-04-11 18:30:58 +01005549 __ and_(scratch, Operand(mask));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005550
5551 // Load the entry from the symbol table.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005552 STATIC_ASSERT(SymbolTable::kEntrySize == 1);
5553 __ mov(candidate,
5554 FieldOperand(symbol_table,
5555 scratch,
5556 times_pointer_size,
5557 SymbolTable::kElementsStartOffset));
5558
5559 // If entry is undefined no string with this hash can be found.
Steve Block44f0eee2011-05-26 01:26:41 +01005560 Factory* factory = masm->isolate()->factory();
5561 __ cmp(candidate, factory->undefined_value());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005562 __ j(equal, not_found);
Ben Murdoch85b71792012-04-11 18:30:58 +01005563 __ cmp(candidate, factory->null_value());
Steve Block44f0eee2011-05-26 01:26:41 +01005564 __ j(equal, &next_probe[i]);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005565
5566 // If length is not 2 the string is not a candidate.
5567 __ cmp(FieldOperand(candidate, String::kLengthOffset),
5568 Immediate(Smi::FromInt(2)));
5569 __ j(not_equal, &next_probe[i]);
5570
5571 // As we are out of registers save the mask on the stack and use that
5572 // register as a temporary.
5573 __ push(mask);
5574 Register temp = mask;
5575
Ben Murdoch85b71792012-04-11 18:30:58 +01005576 // Check that the candidate is a non-external ascii string.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005577 __ mov(temp, FieldOperand(candidate, HeapObject::kMapOffset));
5578 __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
5579 __ JumpIfInstanceTypeIsNotSequentialAscii(
5580 temp, temp, &next_probe_pop_mask[i]);
5581
5582 // Check if the two characters match.
5583 __ mov(temp, FieldOperand(candidate, SeqAsciiString::kHeaderSize));
5584 __ and_(temp, 0x0000ffff);
Ben Murdoch85b71792012-04-11 18:30:58 +01005585 __ cmp(chars, Operand(temp));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005586 __ j(equal, &found_in_symbol_table);
5587 __ bind(&next_probe_pop_mask[i]);
5588 __ pop(mask);
5589 __ bind(&next_probe[i]);
5590 }
5591
5592 // No matching 2 character string found by probing.
5593 __ jmp(not_found);
5594
5595 // Scratch register contains result when we fall through to here.
Ben Murdoch692be652012-01-10 18:47:50 +00005596 Register result = candidate;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005597 __ bind(&found_in_symbol_table);
5598 __ pop(mask); // Pop saved mask from the stack.
5599 if (!result.is(eax)) {
5600 __ mov(eax, result);
5601 }
5602}
5603
5604
5605void StringHelper::GenerateHashInit(MacroAssembler* masm,
5606 Register hash,
5607 Register character,
5608 Register scratch) {
Ben Murdochc7cc0282012-03-05 14:35:55 +00005609 // hash = (seed + character) + ((seed + character) << 10);
5610 if (Serializer::enabled()) {
Ben Murdoch85b71792012-04-11 18:30:58 +01005611 ExternalReference roots_address =
5612 ExternalReference::roots_address(masm->isolate());
Ben Murdochc7cc0282012-03-05 14:35:55 +00005613 __ mov(scratch, Immediate(Heap::kHashSeedRootIndex));
5614 __ mov(scratch, Operand::StaticArray(scratch,
5615 times_pointer_size,
Ben Murdoch85b71792012-04-11 18:30:58 +01005616 roots_address));
Ben Murdochc7cc0282012-03-05 14:35:55 +00005617 __ SmiUntag(scratch);
Ben Murdoch85b71792012-04-11 18:30:58 +01005618 __ add(scratch, Operand(character));
Ben Murdochc7cc0282012-03-05 14:35:55 +00005619 __ mov(hash, scratch);
5620 __ shl(scratch, 10);
Ben Murdoch85b71792012-04-11 18:30:58 +01005621 __ add(hash, Operand(scratch));
Ben Murdochc7cc0282012-03-05 14:35:55 +00005622 } else {
5623 int32_t seed = masm->isolate()->heap()->HashSeed();
5624 __ lea(scratch, Operand(character, seed));
5625 __ shl(scratch, 10);
5626 __ lea(hash, Operand(scratch, character, times_1, seed));
5627 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005628 // hash ^= hash >> 6;
5629 __ mov(scratch, hash);
Ben Murdoch692be652012-01-10 18:47:50 +00005630 __ shr(scratch, 6);
Ben Murdoch85b71792012-04-11 18:30:58 +01005631 __ xor_(hash, Operand(scratch));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005632}
5633
5634
5635void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
5636 Register hash,
5637 Register character,
5638 Register scratch) {
5639 // hash += character;
Ben Murdoch85b71792012-04-11 18:30:58 +01005640 __ add(hash, Operand(character));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005641 // hash += hash << 10;
5642 __ mov(scratch, hash);
5643 __ shl(scratch, 10);
Ben Murdoch85b71792012-04-11 18:30:58 +01005644 __ add(hash, Operand(scratch));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005645 // hash ^= hash >> 6;
5646 __ mov(scratch, hash);
Ben Murdoch692be652012-01-10 18:47:50 +00005647 __ shr(scratch, 6);
Ben Murdoch85b71792012-04-11 18:30:58 +01005648 __ xor_(hash, Operand(scratch));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005649}
5650
5651
5652void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
5653 Register hash,
5654 Register scratch) {
5655 // hash += hash << 3;
5656 __ mov(scratch, hash);
5657 __ shl(scratch, 3);
Ben Murdoch85b71792012-04-11 18:30:58 +01005658 __ add(hash, Operand(scratch));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005659 // hash ^= hash >> 11;
5660 __ mov(scratch, hash);
Ben Murdoch692be652012-01-10 18:47:50 +00005661 __ shr(scratch, 11);
Ben Murdoch85b71792012-04-11 18:30:58 +01005662 __ xor_(hash, Operand(scratch));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005663 // hash += hash << 15;
5664 __ mov(scratch, hash);
5665 __ shl(scratch, 15);
Ben Murdoch85b71792012-04-11 18:30:58 +01005666 __ add(hash, Operand(scratch));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005667
Ben Murdochc7cc0282012-03-05 14:35:55 +00005668 __ and_(hash, String::kHashBitMask);
Ben Murdoch692be652012-01-10 18:47:50 +00005669
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005670 // if (hash == 0) hash = 27;
Ben Murdoch257744e2011-11-30 15:57:28 +00005671 Label hash_not_zero;
Ben Murdoch257744e2011-11-30 15:57:28 +00005672 __ j(not_zero, &hash_not_zero, Label::kNear);
Ben Murdochc7cc0282012-03-05 14:35:55 +00005673 __ mov(hash, Immediate(StringHasher::kZeroHash));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005674 __ bind(&hash_not_zero);
5675}
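// Taken together the three hash helpers above compute, as a rough C sketch:
//   hash  = seed + c0;  hash += hash << 10;  hash ^= hash >> 6;    // init
//   for each further character c: hash += c; hash += hash << 10; hash ^= hash >> 6;
//   hash += hash << 3;  hash ^= hash >> 11;  hash += hash << 15;   // finalize
//   hash &= String::kHashBitMask;  if (hash == 0) hash = StringHasher::kZeroHash;
// which is essentially a seeded Jenkins one-at-a-time hash.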
5676
5677
5678void SubStringStub::Generate(MacroAssembler* masm) {
5679 Label runtime;
5680
5681 // Stack frame on entry.
5682 // esp[0]: return address
5683 // esp[4]: to
5684 // esp[8]: from
5685 // esp[12]: string
5686
5687 // Make sure first argument is a string.
5688 __ mov(eax, Operand(esp, 3 * kPointerSize));
5689 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00005690 __ JumpIfSmi(eax, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005691 Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
5692 __ j(NegateCondition(is_string), &runtime);
5693
5694 // eax: string
5695 // ebx: instance type
5696
5697 // Calculate length of sub string using the smi values.
Ben Murdoch85b71792012-04-11 18:30:58 +01005698 Label result_longer_than_two;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005699 __ mov(ecx, Operand(esp, 1 * kPointerSize)); // To index.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00005700 __ JumpIfNotSmi(ecx, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005701 __ mov(edx, Operand(esp, 2 * kPointerSize)); // From index.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00005702 __ JumpIfNotSmi(edx, &runtime);
Ben Murdoch85b71792012-04-11 18:30:58 +01005703 __ sub(ecx, Operand(edx));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005704 __ cmp(ecx, FieldOperand(eax, String::kLengthOffset));
Ben Murdoch85b71792012-04-11 18:30:58 +01005705 Label return_eax;
5706 __ j(equal, &return_eax);
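  // ecx now holds to - from as a smi; when that equals the full string
  // length the requested range is the whole string (e.g. substring(0, len)),
  // so the unmodified input in eax is returned via return_eax.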
5707 // Special handling of sub-strings of length 1 and 2. One character strings
5708 // are handled in the runtime system (looked up in the single character
5709 // cache). Two character strings are looked for in the symbol cache.
5710 __ SmiUntag(ecx); // Result length is no longer smi.
5711 __ cmp(ecx, 2);
5712 __ j(greater, &result_longer_than_two);
5713 __ j(less, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005714
Ben Murdoch85b71792012-04-11 18:30:58 +01005715 // Sub string of length 2 requested.
Ben Murdochc7cc0282012-03-05 14:35:55 +00005716 // eax: string
5717 // ebx: instance type
Ben Murdoch85b71792012-04-11 18:30:58 +01005718 // ecx: sub string length (value is 2)
Ben Murdochc7cc0282012-03-05 14:35:55 +00005719 // edx: from index (smi)
Ben Murdoch85b71792012-04-11 18:30:58 +01005720 __ JumpIfInstanceTypeIsNotSequentialAscii(ebx, ebx, &runtime);
Ben Murdochc7cc0282012-03-05 14:35:55 +00005721
Ben Murdoch85b71792012-04-11 18:30:58 +01005722 // Get the two characters forming the sub string.
5723 __ SmiUntag(edx); // From index is no longer smi.
5724 __ movzx_b(ebx, FieldOperand(eax, edx, times_1, SeqAsciiString::kHeaderSize));
5725 __ movzx_b(ecx,
5726 FieldOperand(eax, edx, times_1, SeqAsciiString::kHeaderSize + 1));
5727
5728 // Try to lookup two character string in symbol table.
5729 Label make_two_character_string;
5730 StringHelper::GenerateTwoCharacterSymbolTableProbe(
5731 masm, ebx, ecx, eax, edx, edi,
5732 &make_two_character_string, &make_two_character_string);
5733 __ ret(3 * kPointerSize);
5734
5735 __ bind(&make_two_character_string);
5736  // Set up registers for allocating the two character string.
5737 __ mov(eax, Operand(esp, 3 * kPointerSize));
5738 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005739 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
Ben Murdoch85b71792012-04-11 18:30:58 +01005740 __ Set(ecx, Immediate(2));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005741
Ben Murdoch69a99ed2011-11-30 16:03:39 +00005742 if (FLAG_string_slices) {
5743 Label copy_routine;
Ben Murdoch85b71792012-04-11 18:30:58 +01005744 // If coming from the make_two_character_string path, the string
5745    // is too short to be sliced anyway.
5746 STATIC_ASSERT(2 < SlicedString::kMinLength);
5747 __ jmp(&copy_routine);
5748 __ bind(&result_longer_than_two);
5749
5750 // eax: string
5751 // ebx: instance type
5752 // ecx: sub string length
5753 // edx: from index (smi)
5754 Label allocate_slice, sliced_string, seq_string;
5755 __ cmp(ecx, SlicedString::kMinLength);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00005756 // Short slice. Copy instead of slicing.
5757 __ j(less, &copy_routine);
Ben Murdoch85b71792012-04-11 18:30:58 +01005758 STATIC_ASSERT(kSeqStringTag == 0);
5759 __ test(ebx, Immediate(kStringRepresentationMask));
5760 __ j(zero, &seq_string, Label::kNear);
5761 STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
5762 STATIC_ASSERT(kIsIndirectStringMask != 0);
5763 __ test(ebx, Immediate(kIsIndirectStringMask));
5764 // External string. Jump to runtime.
5765 __ j(zero, &runtime);
5766
5767 Factory* factory = masm->isolate()->factory();
5768 __ test(ebx, Immediate(kSlicedNotConsMask));
5769 __ j(not_zero, &sliced_string, Label::kNear);
5770 // Cons string. Check whether it is flat, then fetch first part.
5771 __ cmp(FieldOperand(eax, ConsString::kSecondOffset),
5772 factory->empty_string());
5773 __ j(not_equal, &runtime);
5774 __ mov(edi, FieldOperand(eax, ConsString::kFirstOffset));
5775 __ jmp(&allocate_slice, Label::kNear);
5776
5777 __ bind(&sliced_string);
5778 // Sliced string. Fetch parent and correct start index by offset.
5779 __ add(edx, FieldOperand(eax, SlicedString::kOffsetOffset));
5780 __ mov(edi, FieldOperand(eax, SlicedString::kParentOffset));
5781 __ jmp(&allocate_slice, Label::kNear);
5782
5783 __ bind(&seq_string);
5784 // Sequential string. Just move string to the right register.
5785 __ mov(edi, eax);
5786
5787 __ bind(&allocate_slice);
5788 // edi: underlying subject string
5789 // ebx: instance type of original subject string
5790 // edx: offset
5791 // ecx: length
Ben Murdoch69a99ed2011-11-30 16:03:39 +00005792 // Allocate new sliced string. At this point we do not reload the instance
5793 // type including the string encoding because we simply rely on the info
5794 // provided by the original string. It does not matter if the original
5795 // string's encoding is wrong because we always have to recheck encoding of
5796    // the newly created string's parent anyway due to externalized strings.
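    // The sliced string allocated below is only a small descriptor: its
    // offset, length and parent fields are filled in at set_slice_header,
    // so taking a long substring copies no characters at all.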
5797 Label two_byte_slice, set_slice_header;
Ben Murdoch589d6972011-11-30 16:04:58 +00005798 STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
5799 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
5800 __ test(ebx, Immediate(kStringEncodingMask));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00005801 __ j(zero, &two_byte_slice, Label::kNear);
5802 __ AllocateAsciiSlicedString(eax, ebx, no_reg, &runtime);
5803 __ jmp(&set_slice_header, Label::kNear);
5804 __ bind(&two_byte_slice);
Ben Murdoch589d6972011-11-30 16:04:58 +00005805 __ AllocateTwoByteSlicedString(eax, ebx, no_reg, &runtime);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00005806 __ bind(&set_slice_header);
Ben Murdoch85b71792012-04-11 18:30:58 +01005807 __ mov(FieldOperand(eax, SlicedString::kOffsetOffset), edx);
5808 __ SmiTag(ecx);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00005809 __ mov(FieldOperand(eax, SlicedString::kLengthOffset), ecx);
Ben Murdoch85b71792012-04-11 18:30:58 +01005810 __ mov(FieldOperand(eax, SlicedString::kParentOffset), edi);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00005811 __ mov(FieldOperand(eax, SlicedString::kHashFieldOffset),
5812 Immediate(String::kEmptyHashField));
Ben Murdoch85b71792012-04-11 18:30:58 +01005813 __ jmp(&return_eax);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00005814
5815 __ bind(&copy_routine);
Ben Murdoch85b71792012-04-11 18:30:58 +01005816 } else {
5817 __ bind(&result_longer_than_two);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00005818 }
5819
Ben Murdoch85b71792012-04-11 18:30:58 +01005820 // eax: string
5821 // ebx: instance type
5822 // ecx: result string length
5823 // Check for flat ascii string
5824 Label non_ascii_flat;
5825 __ JumpIfInstanceTypeIsNotSequentialAscii(ebx, ebx, &non_ascii_flat);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005826
Ben Murdoch85b71792012-04-11 18:30:58 +01005827 // Allocate the result.
5828 __ AllocateAsciiString(eax, ecx, ebx, edx, edi, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005829
5830 // eax: result string
5831 // ecx: result string length
5832 __ mov(edx, esi); // esi used by following code.
5833 // Locate first character of result.
5834 __ mov(edi, eax);
Ben Murdoch85b71792012-04-11 18:30:58 +01005835 __ add(Operand(edi), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005836 // Load string argument and locate character of sub string start.
Ben Murdoch85b71792012-04-11 18:30:58 +01005837 __ mov(esi, Operand(esp, 3 * kPointerSize));
5838 __ add(Operand(esi), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
5839 __ mov(ebx, Operand(esp, 2 * kPointerSize)); // from
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005840 __ SmiUntag(ebx);
Ben Murdoch85b71792012-04-11 18:30:58 +01005841 __ add(esi, Operand(ebx));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005842
5843 // eax: result string
5844 // ecx: result length
5845 // edx: original value of esi
5846 // edi: first character of result
5847 // esi: character of sub string start
5848 StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, true);
5849 __ mov(esi, edx); // Restore esi.
Ben Murdoch85b71792012-04-11 18:30:58 +01005850 Counters* counters = masm->isolate()->counters();
Steve Block44f0eee2011-05-26 01:26:41 +01005851 __ IncrementCounter(counters->sub_string_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005852 __ ret(3 * kPointerSize);
5853
Ben Murdoch85b71792012-04-11 18:30:58 +01005854 __ bind(&non_ascii_flat);
5855 // eax: string
5856 // ebx: instance type & kStringRepresentationMask | kStringEncodingMask
5857 // ecx: result string length
5858 // Check for flat two byte string
5859 __ cmp(ebx, kSeqStringTag | kTwoByteStringTag);
5860 __ j(not_equal, &runtime);
5861
5862 // Allocate the result.
5863 __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005864
5865 // eax: result string
5866 // ecx: result string length
5867 __ mov(edx, esi); // esi used by following code.
5868 // Locate first character of result.
5869 __ mov(edi, eax);
Ben Murdoch85b71792012-04-11 18:30:58 +01005870 __ add(Operand(edi),
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005871 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
5872 // Load string argument and locate character of sub string start.
Ben Murdoch85b71792012-04-11 18:30:58 +01005873 __ mov(esi, Operand(esp, 3 * kPointerSize));
5874 __ add(Operand(esi),
5875 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
5876 __ mov(ebx, Operand(esp, 2 * kPointerSize)); // from
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005877 // As from is a smi it is 2 times the value which matches the size of a two
5878 // byte character.
5879 STATIC_ASSERT(kSmiTag == 0);
5880 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
Ben Murdoch85b71792012-04-11 18:30:58 +01005881 __ add(esi, Operand(ebx));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005882
5883 // eax: result string
5884 // ecx: result length
5885 // edx: original value of esi
5886 // edi: first character of result
5887 // esi: character of sub string start
5888 StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, false);
5889 __ mov(esi, edx); // Restore esi.
Ben Murdoch85b71792012-04-11 18:30:58 +01005890
5891 __ bind(&return_eax);
Steve Block44f0eee2011-05-26 01:26:41 +01005892 __ IncrementCounter(counters->sub_string_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005893 __ ret(3 * kPointerSize);
5894
5895 // Just jump to runtime to create the sub string.
5896 __ bind(&runtime);
5897 __ TailCallRuntime(Runtime::kSubString, 3, 1);
5898}
5899
5900
Ben Murdoch257744e2011-11-30 15:57:28 +00005901void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm,
5902 Register left,
5903 Register right,
5904 Register scratch1,
5905 Register scratch2) {
5906 Register length = scratch1;
5907
5908 // Compare lengths.
5909 Label strings_not_equal, check_zero_length;
5910 __ mov(length, FieldOperand(left, String::kLengthOffset));
5911 __ cmp(length, FieldOperand(right, String::kLengthOffset));
5912 __ j(equal, &check_zero_length, Label::kNear);
5913 __ bind(&strings_not_equal);
5914 __ Set(eax, Immediate(Smi::FromInt(NOT_EQUAL)));
5915 __ ret(0);
5916
5917 // Check if the length is zero.
5918 Label compare_chars;
5919 __ bind(&check_zero_length);
5920 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch85b71792012-04-11 18:30:58 +01005921 __ test(length, Operand(length));
Ben Murdoch257744e2011-11-30 15:57:28 +00005922 __ j(not_zero, &compare_chars, Label::kNear);
5923 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
5924 __ ret(0);
5925
5926 // Compare characters.
5927 __ bind(&compare_chars);
5928 GenerateAsciiCharsCompareLoop(masm, left, right, length, scratch2,
5929 &strings_not_equal, Label::kNear);
5930
5931 // Characters are equal.
5932 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
5933 __ ret(0);
5934}
5935
5936
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005937void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
5938 Register left,
5939 Register right,
5940 Register scratch1,
5941 Register scratch2,
5942 Register scratch3) {
Steve Block44f0eee2011-05-26 01:26:41 +01005943 Counters* counters = masm->isolate()->counters();
5944 __ IncrementCounter(counters->string_compare_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005945
5946 // Find minimum length.
Ben Murdoch257744e2011-11-30 15:57:28 +00005947 Label left_shorter;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005948 __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
5949 __ mov(scratch3, scratch1);
5950 __ sub(scratch3, FieldOperand(right, String::kLengthOffset));
5951
5952 Register length_delta = scratch3;
5953
Ben Murdoch257744e2011-11-30 15:57:28 +00005954 __ j(less_equal, &left_shorter, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005955 // Right string is shorter. Change scratch1 to be length of right string.
Ben Murdoch85b71792012-04-11 18:30:58 +01005956 __ sub(scratch1, Operand(length_delta));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005957 __ bind(&left_shorter);
5958
5959 Register min_length = scratch1;
5960
5961 // If either length is zero, just compare lengths.
Ben Murdoch257744e2011-11-30 15:57:28 +00005962 Label compare_lengths;
Ben Murdoch85b71792012-04-11 18:30:58 +01005963 __ test(min_length, Operand(min_length));
Ben Murdoch257744e2011-11-30 15:57:28 +00005964 __ j(zero, &compare_lengths, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005965
Ben Murdoch257744e2011-11-30 15:57:28 +00005966 // Compare characters.
5967 Label result_not_equal;
5968 GenerateAsciiCharsCompareLoop(masm, left, right, min_length, scratch2,
5969 &result_not_equal, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005970
5971 // Compare lengths - strings up to min-length are equal.
5972 __ bind(&compare_lengths);
Ben Murdoch85b71792012-04-11 18:30:58 +01005973 __ test(length_delta, Operand(length_delta));
Ben Murdoch257744e2011-11-30 15:57:28 +00005974 __ j(not_zero, &result_not_equal, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005975
5976 // Result is EQUAL.
5977 STATIC_ASSERT(EQUAL == 0);
5978 STATIC_ASSERT(kSmiTag == 0);
5979 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
5980 __ ret(0);
5981
Ben Murdoch257744e2011-11-30 15:57:28 +00005982 Label result_greater;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005983 __ bind(&result_not_equal);
Ben Murdoch257744e2011-11-30 15:57:28 +00005984 __ j(greater, &result_greater, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005985
5986 // Result is LESS.
5987 __ Set(eax, Immediate(Smi::FromInt(LESS)));
5988 __ ret(0);
5989
5990 // Result is GREATER.
5991 __ bind(&result_greater);
5992 __ Set(eax, Immediate(Smi::FromInt(GREATER)));
5993 __ ret(0);
5994}
5995
5996
Ben Murdoch257744e2011-11-30 15:57:28 +00005997void StringCompareStub::GenerateAsciiCharsCompareLoop(
5998 MacroAssembler* masm,
5999 Register left,
6000 Register right,
6001 Register length,
6002 Register scratch,
6003 Label* chars_not_equal,
6004 Label::Distance chars_not_equal_near) {
6005 // Change index to run from -length to -1 by adding length to string
6006 // start. This means that loop ends when index reaches zero, which
6007 // doesn't need an additional compare.
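 // Roughly equivalent C for the loop below (a sketch, not generated code;
 // left_chars/right_chars stand for the first character of each string):
 //   const char* l = left_chars + length;   // one past the last character
 //   const char* r = right_chars + length;
 //   for (int index = -length; index != 0; index++) {
 //     if (l[index] != r[index]) goto chars_not_equal;
 //   }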
6008 __ SmiUntag(length);
6009 __ lea(left,
6010 FieldOperand(left, length, times_1, SeqAsciiString::kHeaderSize));
6011 __ lea(right,
6012 FieldOperand(right, length, times_1, SeqAsciiString::kHeaderSize));
6013 __ neg(length);
6014 Register index = length; // index = -length;
6015
6016 // Compare loop.
6017 Label loop;
6018 __ bind(&loop);
6019 __ mov_b(scratch, Operand(left, index, times_1, 0));
6020 __ cmpb(scratch, Operand(right, index, times_1, 0));
6021 __ j(not_equal, chars_not_equal, chars_not_equal_near);
Ben Murdoch85b71792012-04-11 18:30:58 +01006022 __ add(Operand(index), Immediate(1));
Ben Murdoch257744e2011-11-30 15:57:28 +00006023 __ j(not_zero, &loop);
6024}
6025
6026
Kristian Monsen80d68ea2010-09-08 11:05:35 +01006027void StringCompareStub::Generate(MacroAssembler* masm) {
6028 Label runtime;
6029
6030 // Stack frame on entry.
6031 // esp[0]: return address
6032 // esp[4]: right string
6033 // esp[8]: left string
6034
6035 __ mov(edx, Operand(esp, 2 * kPointerSize)); // left
6036 __ mov(eax, Operand(esp, 1 * kPointerSize)); // right
6037
Ben Murdoch257744e2011-11-30 15:57:28 +00006038 Label not_same;
Ben Murdoch85b71792012-04-11 18:30:58 +01006039 __ cmp(edx, Operand(eax));
Ben Murdoch257744e2011-11-30 15:57:28 +00006040 __ j(not_equal, &not_same, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01006041 STATIC_ASSERT(EQUAL == 0);
6042 STATIC_ASSERT(kSmiTag == 0);
6043 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
Steve Block44f0eee2011-05-26 01:26:41 +01006044 __ IncrementCounter(masm->isolate()->counters()->string_compare_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01006045 __ ret(2 * kPointerSize);
6046
6047 __ bind(&not_same);
6048
Ben Murdoch85b71792012-04-11 18:30:58 +01006049 // Check that both objects are sequential ASCII strings.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01006050 __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx, &runtime);
6051
Ben Murdoch85b71792012-04-11 18:30:58 +01006052 // Compare flat ASCII strings.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01006053 // Drop arguments from the stack.
6054 __ pop(ecx);
Ben Murdoch85b71792012-04-11 18:30:58 +01006055 __ add(Operand(esp), Immediate(2 * kPointerSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01006056 __ push(ecx);
6057 GenerateCompareFlatAsciiStrings(masm, edx, eax, ecx, ebx, edi);
6058
6059 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
6060 // tagged as a small integer.
6061 __ bind(&runtime);
6062 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
6063}
6064
Ben Murdochb0fe1622011-05-05 13:52:32 +01006065
Ben Murdochb0fe1622011-05-05 13:52:32 +01006066void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
6067 ASSERT(state_ == CompareIC::SMIS);
Ben Murdoch257744e2011-11-30 15:57:28 +00006068 Label miss;
Ben Murdoch85b71792012-04-11 18:30:58 +01006069 __ mov(ecx, Operand(edx));
6070 __ or_(ecx, Operand(eax));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00006071 __ JumpIfNotSmi(ecx, &miss, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01006072
6073 if (GetCondition() == equal) {
6074 // For equality we do not care about the sign of the result.
Ben Murdoch85b71792012-04-11 18:30:58 +01006075 __ sub(eax, Operand(edx));
Ben Murdochb0fe1622011-05-05 13:52:32 +01006076 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00006077 Label done;
Ben Murdoch85b71792012-04-11 18:30:58 +01006078 __ sub(edx, Operand(eax));
Ben Murdoch257744e2011-11-30 15:57:28 +00006079 __ j(no_overflow, &done, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01006080 // Correct sign of result in case of overflow.
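 // If edx - eax overflowed, the sign of edx is the opposite of the true
 // result. 'not' flips the sign bit without being able to overflow again,
 // and only the sign of the (non-zero) result matters to the caller.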
6081 __ not_(edx);
6082 __ bind(&done);
6083 __ mov(eax, edx);
6084 }
6085 __ ret(0);
6086
6087 __ bind(&miss);
6088 GenerateMiss(masm);
6089}
6090
6091
6092void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
6093 ASSERT(state_ == CompareIC::HEAP_NUMBERS);
6094
Ben Murdoch257744e2011-11-30 15:57:28 +00006095 Label generic_stub;
Ben Murdoch85b71792012-04-11 18:30:58 +01006096 Label unordered;
Ben Murdoch257744e2011-11-30 15:57:28 +00006097 Label miss;
Ben Murdoch85b71792012-04-11 18:30:58 +01006098 __ mov(ecx, Operand(edx));
6099 __ and_(ecx, Operand(eax));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00006100 __ JumpIfSmi(ecx, &generic_stub, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01006101
6102 __ CmpObjectType(eax, HEAP_NUMBER_TYPE, ecx);
Ben Murdoch85b71792012-04-11 18:30:58 +01006103 __ j(not_equal, &miss, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01006104 __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
Ben Murdoch85b71792012-04-11 18:30:58 +01006105 __ j(not_equal, &miss, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01006106
6107 // Inline the double comparison, falling back to the general compare stub
6108 // if NaN is involved or SSE2 or CMOV is unsupported.
Ben Murdoch8b112d22011-06-08 16:22:53 +01006109 if (CpuFeatures::IsSupported(SSE2) && CpuFeatures::IsSupported(CMOV)) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01006110 CpuFeatures::Scope scope1(SSE2);
6111 CpuFeatures::Scope scope2(CMOV);
6112
6113 // Load the left and right operands.
6114 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
6115 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
6116
6117 // Compare operands
6118 __ ucomisd(xmm0, xmm1);
6119
6120 // Don't base result on EFLAGS when a NaN is involved.
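 // ucomisd sets the parity flag when the comparison is unordered, i.e. when
 // at least one operand is NaN.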
Ben Murdoch257744e2011-11-30 15:57:28 +00006121 __ j(parity_even, &unordered, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01006122
6123 // Return a result of -1, 0, or 1, based on EFLAGS.
6124 // Use mov rather than xor to zero eax, because xor would destroy the flag register.
6125 __ mov(eax, 0); // equal
6126 __ mov(ecx, Immediate(Smi::FromInt(1)));
Ben Murdoch85b71792012-04-11 18:30:58 +01006127 __ cmov(above, eax, Operand(ecx));
Ben Murdochb0fe1622011-05-05 13:52:32 +01006128 __ mov(ecx, Immediate(Smi::FromInt(-1)));
Ben Murdoch85b71792012-04-11 18:30:58 +01006129 __ cmov(below, eax, Operand(ecx));
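 // eax now holds 0, Smi(1), or Smi(-1): the edx operand equals, is greater
 // than, or is less than the eax operand.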
Ben Murdochb0fe1622011-05-05 13:52:32 +01006130 __ ret(0);
Ben Murdoch85b71792012-04-11 18:30:58 +01006131
6132 __ bind(&unordered);
Ben Murdochb0fe1622011-05-05 13:52:32 +01006133 }
6134
6135 CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
6136 __ bind(&generic_stub);
6137 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
6138
6139 __ bind(&miss);
6140 GenerateMiss(masm);
6141}
6142
6143
Ben Murdoch257744e2011-11-30 15:57:28 +00006144void ICCompareStub::GenerateSymbols(MacroAssembler* masm) {
6145 ASSERT(state_ == CompareIC::SYMBOLS);
6146 ASSERT(GetCondition() == equal);
6147
6148 // Registers containing left and right operands respectively.
6149 Register left = edx;
6150 Register right = eax;
6151 Register tmp1 = ecx;
6152 Register tmp2 = ebx;
6153
6154 // Check that both operands are heap objects.
6155 Label miss;
Ben Murdoch85b71792012-04-11 18:30:58 +01006156 __ mov(tmp1, Operand(left));
Ben Murdoch257744e2011-11-30 15:57:28 +00006157 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch85b71792012-04-11 18:30:58 +01006158 __ and_(tmp1, Operand(right));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00006159 __ JumpIfSmi(tmp1, &miss, Label::kNear);
Ben Murdoch257744e2011-11-30 15:57:28 +00006160
6161 // Check that both operands are symbols.
6162 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
6163 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
6164 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
6165 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
6166 STATIC_ASSERT(kSymbolTag != 0);
Ben Murdoch85b71792012-04-11 18:30:58 +01006167 __ and_(tmp1, Operand(tmp2));
Ben Murdoch257744e2011-11-30 15:57:28 +00006168 __ test(tmp1, Immediate(kIsSymbolMask));
6169 __ j(zero, &miss, Label::kNear);
6170
6171 // Symbols are compared by identity.
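 // Symbols are unique (interned), so two symbols represent the same string
 // exactly when they are the same object; a pointer comparison suffices.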
6172 Label done;
Ben Murdoch85b71792012-04-11 18:30:58 +01006173 __ cmp(left, Operand(right));
Ben Murdoch257744e2011-11-30 15:57:28 +00006174 // Make sure eax is non-zero. At this point input operands are
6175 // guaranteed to be non-zero.
6176 ASSERT(right.is(eax));
6177 __ j(not_equal, &done, Label::kNear);
6178 STATIC_ASSERT(EQUAL == 0);
6179 STATIC_ASSERT(kSmiTag == 0);
6180 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
6181 __ bind(&done);
6182 __ ret(0);
6183
6184 __ bind(&miss);
6185 GenerateMiss(masm);
6186}
6187
6188
6189void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
6190 ASSERT(state_ == CompareIC::STRINGS);
Ben Murdoch85b71792012-04-11 18:30:58 +01006191 ASSERT(GetCondition() == equal);
Ben Murdoch257744e2011-11-30 15:57:28 +00006192 Label miss;
6193
6194 // Registers containing left and right operands respectively.
6195 Register left = edx;
6196 Register right = eax;
6197 Register tmp1 = ecx;
6198 Register tmp2 = ebx;
6199 Register tmp3 = edi;
6200
6201 // Check that both operands are heap objects.
Ben Murdoch85b71792012-04-11 18:30:58 +01006202 __ mov(tmp1, Operand(left));
Ben Murdoch257744e2011-11-30 15:57:28 +00006203 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch85b71792012-04-11 18:30:58 +01006204 __ and_(tmp1, Operand(right));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00006205 __ JumpIfSmi(tmp1, &miss);
Ben Murdoch257744e2011-11-30 15:57:28 +00006206
6207 // Check that both operands are strings. This leaves the instance
6208 // types loaded in tmp1 and tmp2.
6209 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
6210 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
6211 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
6212 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
6213 __ mov(tmp3, tmp1);
6214 STATIC_ASSERT(kNotStringTag != 0);
Ben Murdoch85b71792012-04-11 18:30:58 +01006215 __ or_(tmp3, Operand(tmp2));
Ben Murdoch257744e2011-11-30 15:57:28 +00006216 __ test(tmp3, Immediate(kIsNotStringMask));
6217 __ j(not_zero, &miss);
6218
6219 // Fast check for identical strings.
6220 Label not_same;
Ben Murdoch85b71792012-04-11 18:30:58 +01006221 __ cmp(left, Operand(right));
Ben Murdoch257744e2011-11-30 15:57:28 +00006222 __ j(not_equal, &not_same, Label::kNear);
6223 STATIC_ASSERT(EQUAL == 0);
6224 STATIC_ASSERT(kSmiTag == 0);
6225 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
6226 __ ret(0);
6227
6228 // Handle not identical strings.
6229 __ bind(&not_same);
6230
6231 // Check that both strings are symbols. If they are, we're done
Ben Murdoch85b71792012-04-11 18:30:58 +01006232 // because we already know they are not identical.
6233 Label do_compare;
6234 STATIC_ASSERT(kSymbolTag != 0);
6235 __ and_(tmp1, Operand(tmp2));
6236 __ test(tmp1, Immediate(kIsSymbolMask));
6237 __ j(zero, &do_compare, Label::kNear);
6238 // Make sure eax is non-zero. At this point input operands are
6239 // guaranteed to be non-zero.
6240 ASSERT(right.is(eax));
6241 __ ret(0);
Ben Murdoch257744e2011-11-30 15:57:28 +00006242
6243 // Check that both strings are sequential ASCII.
6244 Label runtime;
Ben Murdoch85b71792012-04-11 18:30:58 +01006245 __ bind(&do_compare);
Ben Murdoch257744e2011-11-30 15:57:28 +00006246 __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime);
6247
6248 // Compare flat ASCII strings. Returns when done.
Ben Murdoch85b71792012-04-11 18:30:58 +01006249 StringCompareStub::GenerateFlatAsciiStringEquals(
6250 masm, left, right, tmp1, tmp2);
Ben Murdoch257744e2011-11-30 15:57:28 +00006251
6252 // Handle more complex cases in runtime.
6253 __ bind(&runtime);
6254 __ pop(tmp1); // Return address.
6255 __ push(left);
6256 __ push(right);
6257 __ push(tmp1);
Ben Murdoch85b71792012-04-11 18:30:58 +01006258 __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
Ben Murdoch257744e2011-11-30 15:57:28 +00006259
6260 __ bind(&miss);
6261 GenerateMiss(masm);
6262}
6263
6264
Ben Murdochb0fe1622011-05-05 13:52:32 +01006265void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
6266 ASSERT(state_ == CompareIC::OBJECTS);
Ben Murdoch257744e2011-11-30 15:57:28 +00006267 Label miss;
Ben Murdoch85b71792012-04-11 18:30:58 +01006268 __ mov(ecx, Operand(edx));
6269 __ and_(ecx, Operand(eax));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00006270 __ JumpIfSmi(ecx, &miss, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01006271
6272 __ CmpObjectType(eax, JS_OBJECT_TYPE, ecx);
Ben Murdoch257744e2011-11-30 15:57:28 +00006273 __ j(not_equal, &miss, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01006274 __ CmpObjectType(edx, JS_OBJECT_TYPE, ecx);
Ben Murdoch257744e2011-11-30 15:57:28 +00006275 __ j(not_equal, &miss, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01006276
6277 ASSERT(GetCondition() == equal);
Ben Murdoch85b71792012-04-11 18:30:58 +01006278 __ sub(eax, Operand(edx));
Ben Murdochc7cc0282012-03-05 14:35:55 +00006279 __ ret(0);
6280
6281 __ bind(&miss);
6282 GenerateMiss(masm);
6283}
6284
6285
6286void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
Ben Murdoch85b71792012-04-11 18:30:58 +01006287 // Save the operand registers (edx and eax) on the stack, below the return address.
6288 __ pop(ecx);
6289 __ push(edx);
6290 __ push(eax);
6291 __ push(ecx);
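 // Stack after this sequence: [esp] return address, [esp + 4] eax, [esp + 8] edx.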
6292
6293 // Call the runtime system in a fresh internal frame.
6294 ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss),
6295 masm->isolate());
6296 __ EnterInternalFrame();
6297 __ push(edx);
6298 __ push(eax);
6299 __ push(Immediate(Smi::FromInt(op_)));
6300 __ CallExternalReference(miss, 3);
6301 __ LeaveInternalFrame();
6302
6303 // Compute the entry point of the rewritten stub.
6304 __ lea(edi, FieldOperand(eax, Code::kHeaderSize));
6305
6306 // Restore registers.
6307 __ pop(ecx);
6308 __ pop(eax);
6309 __ pop(edx);
6310 __ push(ecx);
Ben Murdochb0fe1622011-05-05 13:52:32 +01006311
Ben Murdochb0fe1622011-05-05 13:52:32 +01006312 // Do a tail call to the rewritten stub.
Ben Murdoch85b71792012-04-11 18:30:58 +01006313 __ jmp(Operand(edi));
Ben Murdochb0fe1622011-05-05 13:52:32 +01006314}
6315
6316
Ben Murdoch257744e2011-11-30 15:57:28 +00006317// Helper function used to check that the dictionary doesn't contain
6318 // the property. This function may return false negatives, so the code at
6319 // the |miss| label must always perform a complete backup property check.
6320// This function is safe to call if the receiver has fast properties.
6321// Name must be a symbol and receiver must be a heap object.
Ben Murdoch85b71792012-04-11 18:30:58 +01006322MaybeObject* StringDictionaryLookupStub::GenerateNegativeLookup(
6323 MacroAssembler* masm,
6324 Label* miss,
6325 Label* done,
6326 Register properties,
6327 String* name,
6328 Register r0) {
Ben Murdoch257744e2011-11-30 15:57:28 +00006329 ASSERT(name->IsSymbol());
6330
6331 // If none of the slots probed for the hash value (probes 1 to kProbes - 1)
6332 // holds the name, and the kProbes-th slot is unused (its name is the
6333 // undefined value), the hash table is guaranteed not to contain the
6334 // property. This holds even if some of the probed slots hold deleted
Ben Murdoch85b71792012-04-11 18:30:58 +01006335 // properties (their names are the null value).
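 // In outline (a sketch, not generated code; the exact probe offsets come
 // from StringDictionary::GetProbeOffset):
 //   for (int i = 0; i < kInlinedProbes; i++) {
 //     int entry = (hash + probe_offset(i)) & (capacity - 1);
 //     if (key(entry) == undefined) goto done;         // name is absent
 //     if (key(entry) == name) goto miss;              // name is present
 //     if (!key(entry)->IsSymbol()) goto miss;         // cannot rule it out
 //   }
 // Any remaining probes are handled by the StringDictionaryLookupStub call
 // that follows the loop.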
Ben Murdoch257744e2011-11-30 15:57:28 +00006336 for (int i = 0; i < kInlinedProbes; i++) {
6337 // Compute the masked index: (hash + i + i * i) & mask.
6338 Register index = r0;
6339 // Capacity is smi 2^n.
6340 __ mov(index, FieldOperand(properties, kCapacityOffset));
6341 __ dec(index);
Ben Murdoch85b71792012-04-11 18:30:58 +01006342 __ and_(Operand(index),
6343 Immediate(Smi::FromInt(name->Hash() +
Ben Murdoch257744e2011-11-30 15:57:28 +00006344 StringDictionary::GetProbeOffset(i))));
6345
6346 // Scale the index by multiplying by the entry size.
6347 ASSERT(StringDictionary::kEntrySize == 3);
6348 __ lea(index, Operand(index, index, times_2, 0)); // index *= 3.
6349 Register entity_name = r0;
6350 // Finding the undefined value in this slot means the name is not contained.
6351 ASSERT_EQ(kSmiTagSize, 1);
6352 __ mov(entity_name, Operand(properties, index, times_half_pointer_size,
6353 kElementsStartOffset - kHeapObjectTag));
6354 __ cmp(entity_name, masm->isolate()->factory()->undefined_value());
6355 __ j(equal, done);
6356
6357 // Stop if found the property.
6358 __ cmp(entity_name, Handle<String>(name));
6359 __ j(equal, miss);
6360
6361 // Check if the entry name is not a symbol.
6362 __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
6363 __ test_b(FieldOperand(entity_name, Map::kInstanceTypeOffset),
6364 kIsSymbolMask);
6365 __ j(zero, miss);
6366 }
6367
6368 StringDictionaryLookupStub stub(properties,
6369 r0,
6370 r0,
6371 StringDictionaryLookupStub::NEGATIVE_LOOKUP);
6372 __ push(Immediate(Handle<Object>(name)));
6373 __ push(Immediate(name->Hash()));
Ben Murdoch85b71792012-04-11 18:30:58 +01006374 MaybeObject* result = masm->TryCallStub(&stub);
6375 if (result->IsFailure()) return result;
6376 __ test(r0, Operand(r0));
Ben Murdoch257744e2011-11-30 15:57:28 +00006377 __ j(not_zero, miss);
6378 __ jmp(done);
Ben Murdoch85b71792012-04-11 18:30:58 +01006379 return result;
Ben Murdoch257744e2011-11-30 15:57:28 +00006380}
6381
6382
6383// Probe the string dictionary in the |elements| register. Jump to the
6384// |done| label if a property with the given name is found leaving the
6385// index into the dictionary in |r0|. Jump to the |miss| label
6386// otherwise.
6387void StringDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
6388 Label* miss,
6389 Label* done,
6390 Register elements,
6391 Register name,
6392 Register r0,
6393 Register r1) {
6394 // Assert that name contains a string.
6395 if (FLAG_debug_code) __ AbortIfNotString(name);
6396
6397 __ mov(r1, FieldOperand(elements, kCapacityOffset));
6398 __ shr(r1, kSmiTagSize); // convert smi to int
6399 __ dec(r1);
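 // r1 now holds capacity - 1; since the capacity is a power of two, this is
 // the bit mask used to wrap the probe index.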
6400
6401 // Generate an unrolled loop that performs a few probes before
6402 // giving up. Measurements done on Gmail indicate that 2 probes
6403 // cover ~93% of loads from dictionaries.
6404 for (int i = 0; i < kInlinedProbes; i++) {
6405 // Compute the masked index: (hash + i + i * i) & mask.
6406 __ mov(r0, FieldOperand(name, String::kHashFieldOffset));
6407 __ shr(r0, String::kHashShift);
6408 if (i > 0) {
Ben Murdoch85b71792012-04-11 18:30:58 +01006409 __ add(Operand(r0), Immediate(StringDictionary::GetProbeOffset(i)));
Ben Murdoch257744e2011-11-30 15:57:28 +00006410 }
Ben Murdoch85b71792012-04-11 18:30:58 +01006411 __ and_(r0, Operand(r1));
Ben Murdoch257744e2011-11-30 15:57:28 +00006412
6413 // Scale the index by multiplying by the entry size.
6414 ASSERT(StringDictionary::kEntrySize == 3);
6415 __ lea(r0, Operand(r0, r0, times_2, 0)); // r0 = r0 * 3
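 // Each dictionary entry occupies kEntrySize (3) consecutive elements
 // (key, value and property details), hence the index is scaled by 3 before
 // addressing the elements array.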
6416
6417 // Check if the key is identical to the name.
6418 __ cmp(name, Operand(elements,
6419 r0,
6420 times_4,
6421 kElementsStartOffset - kHeapObjectTag));
6422 __ j(equal, done);
6423 }
6424
6425 StringDictionaryLookupStub stub(elements,
6426 r1,
6427 r0,
6428 POSITIVE_LOOKUP);
6429 __ push(name);
6430 __ mov(r0, FieldOperand(name, String::kHashFieldOffset));
6431 __ shr(r0, String::kHashShift);
6432 __ push(r0);
6433 __ CallStub(&stub);
6434
Ben Murdoch85b71792012-04-11 18:30:58 +01006435 __ test(r1, Operand(r1));
Ben Murdoch257744e2011-11-30 15:57:28 +00006436 __ j(zero, miss);
6437 __ jmp(done);
6438}
6439
6440
6441void StringDictionaryLookupStub::Generate(MacroAssembler* masm) {
6442 // Stack frame on entry:
6443 // esp[0 * kPointerSize]: return address.
6444 // esp[1 * kPointerSize]: key's hash.
6445 // esp[2 * kPointerSize]: key.
6446 // Registers:
6447 // dictionary_: StringDictionary to probe.
6448 // result_: used as scratch.
6449 // index_: will hold the index of the entry if the lookup is successful;
6450 // might alias with result_.
6451 // Returns:
6452 // result_ is zero if the lookup failed, non-zero otherwise.
6453
6454 Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
6455
6456 Register scratch = result_;
6457
6458 __ mov(scratch, FieldOperand(dictionary_, kCapacityOffset));
6459 __ dec(scratch);
6460 __ SmiUntag(scratch);
6461 __ push(scratch);
6462
6463 // If none of the slots probed for the hash value (probes 1 to kProbes - 1)
6464 // holds the name, and the kProbes-th slot is unused (its name is the
6465 // undefined value), the hash table is guaranteed not to contain the
6466 // property. This holds even if some of the probed slots hold deleted
6467 // properties (their names are the null value).
6468 for (int i = kInlinedProbes; i < kTotalProbes; i++) {
6469 // Compute the masked index: (hash + i + i * i) & mask.
6470 __ mov(scratch, Operand(esp, 2 * kPointerSize));
6471 if (i > 0) {
Ben Murdoch85b71792012-04-11 18:30:58 +01006472 __ add(Operand(scratch),
6473 Immediate(StringDictionary::GetProbeOffset(i)));
Ben Murdoch257744e2011-11-30 15:57:28 +00006474 }
6475 __ and_(scratch, Operand(esp, 0));
6476
6477 // Scale the index by multiplying by the entry size.
6478 ASSERT(StringDictionary::kEntrySize == 3);
6479 __ lea(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3.
6480
6481 // Finding the undefined value in this slot means the name is not contained.
6482 ASSERT_EQ(kSmiTagSize, 1);
6483 __ mov(scratch, Operand(dictionary_,
6484 index_,
6485 times_pointer_size,
6486 kElementsStartOffset - kHeapObjectTag));
6487 __ cmp(scratch, masm->isolate()->factory()->undefined_value());
6488 __ j(equal, &not_in_dictionary);
6489
6490 // Stop if found the property.
6491 __ cmp(scratch, Operand(esp, 3 * kPointerSize));
6492 __ j(equal, &in_dictionary);
6493
6494 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) {
6495 // If we hit a non-symbol key during negative lookup
6496 // we have to bail out as this key might be equal to the
6497 // key we are looking for.
6498
6499 // Check if the entry name is not a symbol.
6500 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
6501 __ test_b(FieldOperand(scratch, Map::kInstanceTypeOffset),
6502 kIsSymbolMask);
6503 __ j(zero, &maybe_in_dictionary);
6504 }
6505 }
6506
6507 __ bind(&maybe_in_dictionary);
6508 // If we are doing negative lookup then probing failure should be
6509 // treated as a lookup success. For positive lookup probing failure
6510 // should be treated as lookup failure.
6511 if (mode_ == POSITIVE_LOOKUP) {
6512 __ mov(result_, Immediate(0));
6513 __ Drop(1);
6514 __ ret(2 * kPointerSize);
6515 }
6516
6517 __ bind(&in_dictionary);
6518 __ mov(result_, Immediate(1));
6519 __ Drop(1);
6520 __ ret(2 * kPointerSize);
6521
6522 __ bind(&not_in_dictionary);
6523 __ mov(result_, Immediate(0));
6524 __ Drop(1);
6525 __ ret(2 * kPointerSize);
6526}
6527
6528
Kristian Monsen80d68ea2010-09-08 11:05:35 +01006529#undef __
6530
6531} } // namespace v8::internal
6532
6533#endif // V8_TARGET_ARCH_IA32