// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "bootstrapper.h"
#include "code-stubs.h"
#include "regexp-macro-assembler.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void ToNumberStub::Generate(MacroAssembler* masm) {
  // The ToNumber stub takes one argument in rax.
  Label check_heap_number, call_builtin;
  __ SmiTest(rax);
  __ j(not_zero, &check_heap_number, Label::kNear);
  __ Ret();

  __ bind(&check_heap_number);
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &call_builtin, Label::kNear);
  __ Ret();

  __ bind(&call_builtin);
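  // Slip the argument under the return address so the builtin sees it as a
  // regular stack argument; this pop/push shuffle recurs throughout this file.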
  __ pop(rcx);  // Pop return address.
  __ push(rax);
  __ push(rcx);  // Push return address.
  __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION);
}


void FastNewClosureStub::Generate(MacroAssembler* masm) {
  // Create a new closure from the given function info in new
  // space. Set the context to the current context in rsi.
  Label gc;
  __ AllocateInNewSpace(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT);

  // Get the function info from the stack.
  __ movq(rdx, Operand(rsp, 1 * kPointerSize));

  int map_index = strict_mode_ == kStrictMode
      ? Context::STRICT_MODE_FUNCTION_MAP_INDEX
      : Context::FUNCTION_MAP_INDEX;

  // Compute the function map in the current global context and set that
  // as the map of the allocated object.
  __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset));
  __ movq(rcx, Operand(rcx, Context::SlotOffset(map_index)));
  __ movq(FieldOperand(rax, JSObject::kMapOffset), rcx);

  // Initialize the rest of the function. We don't have to update the
  // write barrier because the allocated object is in new space.
  __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
  __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex);
  __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
  __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
  __ movq(FieldOperand(rax, JSObject::kElementsOffset), rbx);
  __ movq(FieldOperand(rax, JSFunction::kPrototypeOrInitialMapOffset), rcx);
  __ movq(FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset), rdx);
  __ movq(FieldOperand(rax, JSFunction::kContextOffset), rsi);
  __ movq(FieldOperand(rax, JSFunction::kLiteralsOffset), rbx);
  __ movq(FieldOperand(rax, JSFunction::kNextFunctionLinkOffset), rdi);

  // Initialize the code pointer in the function to be the one
  // found in the shared function info object.
  __ movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
  __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
  __ movq(FieldOperand(rax, JSFunction::kCodeEntryOffset), rdx);


  // Return and remove the on-stack parameter.
  __ ret(1 * kPointerSize);

  // Create a new closure through the slower runtime call.
  __ bind(&gc);
  __ pop(rcx);  // Temporarily remove return address.
  __ pop(rdx);
  __ push(rsi);
  __ push(rdx);
  __ PushRoot(Heap::kFalseValueRootIndex);
  __ push(rcx);  // Restore return address.
  __ TailCallRuntime(Runtime::kNewClosure, 3, 1);
}


void FastNewContextStub::Generate(MacroAssembler* masm) {
  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
  __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize,
                        rax, rbx, rcx, &gc, TAG_OBJECT);

  // Get the function from the stack.
  __ movq(rcx, Operand(rsp, 1 * kPointerSize));

  // Set up the object header.
  __ LoadRoot(kScratchRegister, Heap::kContextMapRootIndex);
  __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
  __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));

  // Set up the fixed slots.
  __ Set(rbx, 0);  // Set to NULL.
  __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx);
  __ movq(Operand(rax, Context::SlotOffset(Context::FCONTEXT_INDEX)), rax);
  __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rbx);
  __ movq(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx);

  // Copy the global object from the surrounding context.
  __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_INDEX)), rbx);

  // Initialize the rest of the slots to undefined.
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
    __ movq(Operand(rax, Context::SlotOffset(i)), rbx);
  }

  // Return and remove the on-stack parameter.
  __ movq(rsi, rax);
  __ ret(1 * kPointerSize);

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kNewContext, 1, 1);
}


void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [rsp + kPointerSize]: constant elements.
  // [rsp + (2 * kPointerSize)]: literal index.
  // [rsp + (3 * kPointerSize)]: literals array.

  // All sizes here are multiples of kPointerSize.
  int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
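  // FixedArray::SizeFor(n) is FixedArray::kHeaderSize + n * kPointerSize, so
  // the array and its elements can be carved out of one contiguous block.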
  int size = JSArray::kSize + elements_size;

  // Load boilerplate object into rcx and check if we need to create a
  // boilerplate.
  Label slow_case;
  __ movq(rcx, Operand(rsp, 3 * kPointerSize));
  __ movq(rax, Operand(rsp, 2 * kPointerSize));
  SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
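  // On x64 a smi keeps its 32-bit payload in the upper half of the word
  // (see STATIC_ASSERT(kSmiValueSize == 32) further down in this file), so
  // SmiToIndex needs only a shift to turn the tagged literal index into a
  // usable element index.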
  __ movq(rcx,
          FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize));
  __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
  __ j(equal, &slow_case);

  if (FLAG_debug_code) {
    const char* message;
    Heap::RootListIndex expected_map_index;
    if (mode_ == CLONE_ELEMENTS) {
      message = "Expected (writable) fixed array";
      expected_map_index = Heap::kFixedArrayMapRootIndex;
    } else {
      ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS);
      message = "Expected copy-on-write fixed array";
      expected_map_index = Heap::kFixedCOWArrayMapRootIndex;
    }
    __ push(rcx);
    __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
    __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
                   expected_map_index);
    __ Assert(equal, message);
    __ pop(rcx);
  }

  // Allocate both the JS array and the elements array in one big
  // allocation. This avoids multiple limit checks.
  __ AllocateInNewSpace(size, rax, rbx, rdx, &slow_case, TAG_OBJECT);

  // Copy the JS array part.
  for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
    if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
      __ movq(rbx, FieldOperand(rcx, i));
      __ movq(FieldOperand(rax, i), rbx);
    }
  }

  if (length_ > 0) {
    // Get hold of the elements array of the boilerplate and set up the
    // elements pointer in the resulting object.
    __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
    __ lea(rdx, Operand(rax, JSArray::kSize));
    __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx);

    // Copy the elements array.
    for (int i = 0; i < elements_size; i += kPointerSize) {
      __ movq(rbx, FieldOperand(rcx, i));
      __ movq(FieldOperand(rdx, i), rbx);
    }
  }

  // Return and remove the on-stack parameters.
  __ ret(3 * kPointerSize);

  __ bind(&slow_case);
  __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
}


void ToBooleanStub::Generate(MacroAssembler* masm) {
  Label false_result, true_result, not_string;
  __ movq(rax, Operand(rsp, 1 * kPointerSize));

  // undefined -> false
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
  __ j(equal, &false_result);

  // Boolean -> its value
  __ CompareRoot(rax, Heap::kFalseValueRootIndex);
  __ j(equal, &false_result);
  __ CompareRoot(rax, Heap::kTrueValueRootIndex);
  __ j(equal, &true_result);

  // Smis: 0 -> false, all other -> true
  __ Cmp(rax, Smi::FromInt(0));
  __ j(equal, &false_result);
  Condition is_smi = __ CheckSmi(rax);
  __ j(is_smi, &true_result);

  // 'null' => false.
  __ CompareRoot(rax, Heap::kNullValueRootIndex);
  __ j(equal, &false_result, Label::kNear);

  // Get the map and type of the heap object.
  // We don't use CmpObjectType because we manipulate the type field.
  __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
  __ movzxbq(rcx, FieldOperand(rdx, Map::kInstanceTypeOffset));

  // Undetectable => false.
  __ movzxbq(rbx, FieldOperand(rdx, Map::kBitFieldOffset));
  __ and_(rbx, Immediate(1 << Map::kIsUndetectable));
  __ j(not_zero, &false_result, Label::kNear);

  // JavaScript object => true.
  __ cmpq(rcx, Immediate(FIRST_JS_OBJECT_TYPE));
  __ j(above_equal, &true_result, Label::kNear);

  // String value => false iff empty.
  __ cmpq(rcx, Immediate(FIRST_NONSTRING_TYPE));
  __ j(above_equal, &not_string, Label::kNear);
  __ movq(rdx, FieldOperand(rax, String::kLengthOffset));
  __ SmiTest(rdx);
  __ j(zero, &false_result, Label::kNear);
  __ jmp(&true_result, Label::kNear);

  __ bind(&not_string);
  __ CompareRoot(rdx, Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &true_result, Label::kNear);
  // HeapNumber => false iff +0, -0, or NaN.
  // These three cases set the zero flag when compared to zero using ucomisd.
  __ xorps(xmm0, xmm0);
  __ ucomisd(xmm0, FieldOperand(rax, HeapNumber::kValueOffset));
  __ j(zero, &false_result, Label::kNear);
  // Fall through to |true_result|.

  // Return 1/0 for true/false in rax.
  __ bind(&true_result);
  __ Set(rax, 1);
  __ ret(1 * kPointerSize);
  __ bind(&false_result);
  __ Set(rax, 0);
  __ ret(1 * kPointerSize);
}


class FloatingPointHelper : public AllStatic {
 public:
  // Load the operands from rdx and rax into xmm0 and xmm1, as doubles.
  // If the operands are not both numbers, jump to not_numbers.
  // Leaves rdx and rax unchanged. SmiOperands assumes both are smis.
  // NumberOperands assumes both are smis or heap numbers.
  static void LoadSSE2SmiOperands(MacroAssembler* masm);
  static void LoadSSE2NumberOperands(MacroAssembler* masm);
  static void LoadSSE2UnknownOperands(MacroAssembler* masm,
                                      Label* not_numbers);

  // Takes the operands in rdx and rax and loads them as integers in rax
  // and rcx.
  static void LoadAsIntegers(MacroAssembler* masm,
                             Label* operand_conversion_failure,
                             Register heap_number_map);
  // As above, but we know the operands to be numbers. In that case,
  // conversion can't fail.
  static void LoadNumbersAsIntegers(MacroAssembler* masm);

  // Tries to convert two values to smis losslessly.
  // This fails if either argument is neither a Smi nor a HeapNumber,
  // or if it's a HeapNumber with a value that can't be converted
  // losslessly to a Smi. In that case, control transitions to the
  // on_not_smis label.
  // On success, control either goes to the on_success label (if one is
  // provided), or falls through at the end of the code (if on_success
  // is NULL).
  // On success, both first and second hold Smi tagged values.
  // One of first or second must be non-Smi when entering.
  static void NumbersToSmis(MacroAssembler* masm,
                            Register first,
                            Register second,
                            Register scratch1,
                            Register scratch2,
                            Register scratch3,
                            Label* on_success,
                            Label* on_not_smis);
};


// Get the integer part of a heap number.
// Overwrites the contents of rdi, rbx and rcx. Result cannot be rdi or rbx.
void IntegerConvert(MacroAssembler* masm,
                    Register result,
                    Register source) {
  // Result may be rcx. If result and source are the same register, source will
  // be overwritten.
  ASSERT(!result.is(rdi) && !result.is(rbx));
  // TODO(lrn): When type info reaches here, if value is a 32-bit integer, use
  // cvttsd2si (32-bit version) directly.
  Register double_exponent = rbx;
  Register double_value = rdi;
  Label done, exponent_63_plus;
  // Get double and extract exponent.
  __ movq(double_value, FieldOperand(source, HeapNumber::kValueOffset));
  // Clear result preemptively, in case we need to return zero.
  __ xorl(result, result);
  __ movq(xmm0, double_value);  // Save copy in xmm0 in case we need it there.
  // Double to remove sign bit, shift exponent down to least significant bits,
  // and subtract bias to get the unshifted, unbiased exponent.
  __ lea(double_exponent, Operand(double_value, double_value, times_1, 0));
  __ shr(double_exponent, Immediate(64 - HeapNumber::kExponentBits));
  __ subl(double_exponent, Immediate(HeapNumber::kExponentBias));
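  // An IEEE 754 double has 1 sign bit, 11 exponent bits and 52 mantissa bits;
  // for unbiased exponent e the integer bits of the value span 2^e down to
  // 2^(e-52). Exponents below 63 convert exactly with cvttsd2siq; exponents
  // 63..83 still contribute bits to the range 2^0..2^31, recovered below by
  // shifting the mantissa; above 83 the low 32 bits are all zero.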
  // Check whether the exponent is too big for a 63 bit unsigned integer.
  __ cmpl(double_exponent, Immediate(63));
  __ j(above_equal, &exponent_63_plus, Label::kNear);
  // Handle exponent range 0..62.
  __ cvttsd2siq(result, xmm0);
  __ jmp(&done, Label::kNear);

  __ bind(&exponent_63_plus);
  // Exponent negative or 63+.
  __ cmpl(double_exponent, Immediate(83));
  // If exponent negative or above 83, number contains no significant bits in
  // the range 0..2^31, so the result is zero; result was already cleared above.
  __ j(above, &done, Label::kNear);

  // Exponent in range 63..83.
  // Mantissa * 2^exponent contains bits in the range 2^0..2^31, namely
  // the least significant exponent-52 bits.

  // Negate low bits of mantissa if value is negative.
  __ addq(double_value, double_value);  // Move sign bit to carry.
  __ sbbl(result, result);  // And convert carry to -1 in result register.
  // If the value was negative, result is now -1, so (double_value - 1) ^ -1
  // negates the low bits; otherwise (double_value - 0) ^ 0 leaves them alone.
  __ addl(double_value, result);
  // Do xor in opposite directions depending on where we want the result
  // (depending on whether result is rcx or not).

  if (result.is(rcx)) {
    __ xorl(double_value, result);
    // Left shift mantissa by (exponent - mantissabits - 1) to save the
    // bits that have positional values below 2^32 (the extra -1 comes from the
    // doubling done above to move the sign bit into the carry flag).
    __ leal(rcx, Operand(double_exponent, -HeapNumber::kMantissaBits - 1));
    __ shll_cl(double_value);
    __ movl(result, double_value);
  } else {
    // As the then-branch, but move double-value to result before shifting.
    __ xorl(result, double_value);
    __ leal(rcx, Operand(double_exponent, -HeapNumber::kMantissaBits - 1));
    __ shll_cl(result);
  }

  __ bind(&done);
}


Handle<Code> GetUnaryOpStub(int key, UnaryOpIC::TypeInfo type_info) {
  UnaryOpStub stub(key, type_info);
  return stub.GetCode();
}


void UnaryOpStub::Generate(MacroAssembler* masm) {
  switch (operand_type_) {
    case UnaryOpIC::UNINITIALIZED:
      GenerateTypeTransition(masm);
      break;
    case UnaryOpIC::SMI:
      GenerateSmiStub(masm);
      break;
    case UnaryOpIC::HEAP_NUMBER:
      GenerateHeapNumberStub(masm);
      break;
    case UnaryOpIC::GENERIC:
      GenerateGenericStub(masm);
      break;
  }
}


void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ pop(rcx);  // Save return address.
  __ push(rax);
  // The argument is now on top of the stack.
  // Push this stub's key. Although the operation and the type info are
  // encoded into the key, the encoding is opaque, so push them too.
  __ Push(Smi::FromInt(MinorKey()));
  __ Push(Smi::FromInt(op_));
  __ Push(Smi::FromInt(operand_type_));

  __ push(rcx);  // Push return address.

  // Patch the caller to an appropriate specialized stub and return the
  // operation result to the caller of the stub.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kUnaryOp_Patch),
                        masm->isolate()),
      4,
      1);
}


// TODO(svenpanne): Use virtual functions instead of switch.
void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
  switch (op_) {
    case Token::SUB:
      GenerateSmiStubSub(masm);
      break;
    case Token::BIT_NOT:
      GenerateSmiStubBitNot(masm);
      break;
    default:
      UNREACHABLE();
  }
}


void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) {
  Label slow;
  GenerateSmiCodeSub(masm, &slow, &slow, Label::kNear, Label::kNear);
  __ bind(&slow);
  GenerateTypeTransition(masm);
}


void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) {
  Label non_smi;
  GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
  __ bind(&non_smi);
  GenerateTypeTransition(masm);
}


void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
                                     Label* non_smi,
                                     Label* slow,
                                     Label::Distance non_smi_near,
                                     Label::Distance slow_near) {
  Label done;
  __ JumpIfNotSmi(rax, non_smi, non_smi_near);
  __ SmiNeg(rax, rax, &done, Label::kNear);
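  // SmiNeg jumps to done on success and falls through when the negation is
  // not a smi: negating 0 yields -0 (a heap number) and negating the most
  // negative smi overflows.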
  __ jmp(slow, slow_near);
  __ bind(&done);
  __ ret(0);
}


void UnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm,
                                        Label* non_smi,
                                        Label::Distance non_smi_near) {
  __ JumpIfNotSmi(rax, non_smi, non_smi_near);
  __ SmiNot(rax, rax);
  __ ret(0);
}


// TODO(svenpanne): Use virtual functions instead of switch.
void UnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
  switch (op_) {
    case Token::SUB:
      GenerateHeapNumberStubSub(masm);
      break;
    case Token::BIT_NOT:
      GenerateHeapNumberStubBitNot(masm);
      break;
    default:
      UNREACHABLE();
  }
}


void UnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) {
  Label non_smi, slow, call_builtin;
  GenerateSmiCodeSub(masm, &non_smi, &call_builtin, Label::kNear);
  __ bind(&non_smi);
  GenerateHeapNumberCodeSub(masm, &slow);
  __ bind(&slow);
  GenerateTypeTransition(masm);
  __ bind(&call_builtin);
  GenerateGenericCodeFallback(masm);
}


void UnaryOpStub::GenerateHeapNumberStubBitNot(
    MacroAssembler* masm) {
  Label non_smi, slow;
  GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
  __ bind(&non_smi);
  GenerateHeapNumberCodeBitNot(masm, &slow);
  __ bind(&slow);
  GenerateTypeTransition(masm);
}


void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
                                            Label* slow) {
  // Check if the operand is a heap number.
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, slow);

  // Operand is a float, negate its value by flipping the sign bit.
  if (mode_ == UNARY_OVERWRITE) {
    __ Set(kScratchRegister, 0x01);
    __ shl(kScratchRegister, Immediate(63));
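    // kScratchRegister is now 0x8000000000000000, the IEEE 754 sign bit;
    // xoring it into the stored double negates the value in place.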
    __ xor_(FieldOperand(rax, HeapNumber::kValueOffset), kScratchRegister);
  } else {
    // Allocate a heap number before calculating the answer,
    // so we don't have an untagged double around during GC.
    Label slow_allocate_heapnumber, heapnumber_allocated;
    __ AllocateHeapNumber(rcx, rbx, &slow_allocate_heapnumber);
    __ jmp(&heapnumber_allocated);

    __ bind(&slow_allocate_heapnumber);
    __ EnterInternalFrame();
    __ push(rax);
    __ CallRuntime(Runtime::kNumberAlloc, 0);
    __ movq(rcx, rax);
    __ pop(rax);
    __ LeaveInternalFrame();
    __ bind(&heapnumber_allocated);
    // rcx: allocated 'empty' number

    // Copy the double value to the new heap number, flipping the sign.
    __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset));
    __ Set(kScratchRegister, 0x01);
    __ shl(kScratchRegister, Immediate(63));
    __ xor_(rdx, kScratchRegister);  // Flip sign.
    __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx);
    __ movq(rax, rcx);
  }
  __ ret(0);
}


void UnaryOpStub::GenerateHeapNumberCodeBitNot(MacroAssembler* masm,
                                               Label* slow) {
  // Check if the operand is a heap number.
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, slow);

  // Convert the heap number in rax to an untagged integer, also in rax.
  IntegerConvert(masm, rax, rax);

  // Do the bitwise operation and smi tag the result.
  __ notl(rax);
  __ Integer32ToSmi(rax, rax);
  __ ret(0);
}


// TODO(svenpanne): Use virtual functions instead of switch.
void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) {
  switch (op_) {
    case Token::SUB:
      GenerateGenericStubSub(masm);
      break;
    case Token::BIT_NOT:
      GenerateGenericStubBitNot(masm);
      break;
    default:
      UNREACHABLE();
  }
}


void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) {
  Label non_smi, slow;
  GenerateSmiCodeSub(masm, &non_smi, &slow, Label::kNear);
  __ bind(&non_smi);
  GenerateHeapNumberCodeSub(masm, &slow);
  __ bind(&slow);
  GenerateGenericCodeFallback(masm);
}


void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) {
  Label non_smi, slow;
  GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
  __ bind(&non_smi);
  GenerateHeapNumberCodeBitNot(masm, &slow);
  __ bind(&slow);
  GenerateGenericCodeFallback(masm);
}


void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) {
  // Handle the slow case by jumping to the JavaScript builtin.
  __ pop(rcx);  // pop return address
  __ push(rax);
  __ push(rcx);  // push return address
  switch (op_) {
    case Token::SUB:
      __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
      break;
    case Token::BIT_NOT:
      __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}


const char* UnaryOpStub::GetName() {
  if (name_ != NULL) return name_;
  const int kMaxNameLength = 100;
  name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
      kMaxNameLength);
  if (name_ == NULL) return "OOM";
  const char* op_name = Token::Name(op_);
  const char* overwrite_name = NULL;  // Make g++ happy.
  switch (mode_) {
    case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
    case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
  }

  OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
               "UnaryOpStub_%s_%s_%s",
               op_name,
               overwrite_name,
               UnaryOpIC::GetName(operand_type_));
  return name_;
}


Handle<Code> GetBinaryOpStub(int key,
                             BinaryOpIC::TypeInfo type_info,
                             BinaryOpIC::TypeInfo result_type_info) {
  BinaryOpStub stub(key, type_info, result_type_info);
  return stub.GetCode();
}


void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ pop(rcx);  // Save return address.
  __ push(rdx);
  __ push(rax);
  // Left and right arguments are now on top.
  // Push this stub's key. Although the operation and the type info are
  // encoded into the key, the encoding is opaque, so push them too.
  __ Push(Smi::FromInt(MinorKey()));
  __ Push(Smi::FromInt(op_));
  __ Push(Smi::FromInt(operands_type_));

  __ push(rcx);  // Push return address.

  // Patch the caller to an appropriate specialized stub and return the
  // operation result to the caller of the stub.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kBinaryOp_Patch),
                        masm->isolate()),
      5,
      1);
}


void BinaryOpStub::Generate(MacroAssembler* masm) {
  switch (operands_type_) {
    case BinaryOpIC::UNINITIALIZED:
      GenerateTypeTransition(masm);
      break;
    case BinaryOpIC::SMI:
      GenerateSmiStub(masm);
      break;
    case BinaryOpIC::INT32:
      UNREACHABLE();
      // The int32 case is identical to the Smi case. We avoid creating this
      // ic state on x64.
      break;
    case BinaryOpIC::HEAP_NUMBER:
      GenerateHeapNumberStub(masm);
      break;
    case BinaryOpIC::ODDBALL:
      GenerateOddballStub(masm);
      break;
    case BinaryOpIC::BOTH_STRING:
      GenerateBothStringStub(masm);
      break;
    case BinaryOpIC::STRING:
      GenerateStringStub(masm);
      break;
    case BinaryOpIC::GENERIC:
      GenerateGeneric(masm);
      break;
    default:
      UNREACHABLE();
  }
}


const char* BinaryOpStub::GetName() {
  if (name_ != NULL) return name_;
  const int kMaxNameLength = 100;
  name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
      kMaxNameLength);
  if (name_ == NULL) return "OOM";
  const char* op_name = Token::Name(op_);
  const char* overwrite_name;
  switch (mode_) {
    case NO_OVERWRITE: overwrite_name = "Alloc"; break;
    case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
    case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
    default: overwrite_name = "UnknownOverwrite"; break;
  }

  OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
               "BinaryOpStub_%s_%s_%s",
               op_name,
               overwrite_name,
               BinaryOpIC::GetName(operands_type_));
  return name_;
}


void BinaryOpStub::GenerateSmiCode(
    MacroAssembler* masm,
    Label* slow,
    SmiCodeGenerateHeapNumberResults allow_heapnumber_results) {
  // Arguments to BinaryOpStub are in rdx and rax.
  Register left = rdx;
  Register right = rax;

  // We only generate heapnumber answers for overflowing calculations
  // for the four basic arithmetic operations and logical right shift by 0.
  bool generate_inline_heapnumber_results =
      (allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS) &&
      (op_ == Token::ADD || op_ == Token::SUB ||
       op_ == Token::MUL || op_ == Token::DIV || op_ == Token::SHR);

  // Smi check of both operands. If op is BIT_OR, the check is delayed
  // until after the OR operation.
  Label not_smis;
  Label use_fp_on_smis;
  Label fail;

  if (op_ != Token::BIT_OR) {
    Comment smi_check_comment(masm, "-- Smi check arguments");
    __ JumpIfNotBothSmi(left, right, &not_smis);
  }

  Label smi_values;
  __ bind(&smi_values);
  // Perform the operation.
  Comment perform_smi(masm, "-- Perform smi operation");
  switch (op_) {
    case Token::ADD:
      ASSERT(right.is(rax));
      __ SmiAdd(right, right, left, &use_fp_on_smis);  // ADD is commutative.
      break;

    case Token::SUB:
      __ SmiSub(left, left, right, &use_fp_on_smis);
      __ movq(rax, left);
      break;

    case Token::MUL:
      ASSERT(right.is(rax));
      __ SmiMul(right, right, left, &use_fp_on_smis);  // MUL is commutative.
      break;

    case Token::DIV:
      // SmiDiv will not accept left in rdx or right in rax.
      left = rcx;
      right = rbx;
      __ movq(rbx, rax);
      __ movq(rcx, rdx);
      __ SmiDiv(rax, left, right, &use_fp_on_smis);
      break;

    case Token::MOD:
      // SmiMod will not accept left in rdx or right in rax.
      left = rcx;
      right = rbx;
      __ movq(rbx, rax);
      __ movq(rcx, rdx);
      __ SmiMod(rax, left, right, &use_fp_on_smis);
      break;

    case Token::BIT_OR: {
      ASSERT(right.is(rax));
      __ SmiOrIfSmis(right, right, left, &not_smis);  // BIT_OR is commutative.
      break;
    }
    case Token::BIT_XOR:
      ASSERT(right.is(rax));
      __ SmiXor(right, right, left);  // BIT_XOR is commutative.
      break;

    case Token::BIT_AND:
      ASSERT(right.is(rax));
      __ SmiAnd(right, right, left);  // BIT_AND is commutative.
      break;

    case Token::SHL:
      __ SmiShiftLeft(left, left, right);
      __ movq(rax, left);
      break;

    case Token::SAR:
      __ SmiShiftArithmeticRight(left, left, right);
      __ movq(rax, left);
      break;

    case Token::SHR:
      __ SmiShiftLogicalRight(left, left, right, &use_fp_on_smis);
      __ movq(rax, left);
      break;

    default:
      UNREACHABLE();
  }

  // Emit return of result in rax. Some operations have registers pushed.
  __ ret(0);

  if (use_fp_on_smis.is_linked()) {
    // For some operations emit inline code to perform floating point
    // operations on known smis (e.g., if the result of the operation
    // overflowed the smi range).
    __ bind(&use_fp_on_smis);
    if (op_ == Token::DIV || op_ == Token::MOD) {
      // Restore left and right to rdx and rax.
      __ movq(rdx, rcx);
      __ movq(rax, rbx);
    }

    if (generate_inline_heapnumber_results) {
      __ AllocateHeapNumber(rcx, rbx, slow);
      Comment perform_float(masm, "-- Perform float operation on smis");
      if (op_ == Token::SHR) {
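        // The logical shift may have produced an unsigned 32-bit value that
        // is not a valid int32. It sits zero-extended in the register, so
        // the 64-bit signed conversion below is exact.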
        __ SmiToInteger32(left, left);
        __ cvtqsi2sd(xmm0, left);
      } else {
        FloatingPointHelper::LoadSSE2SmiOperands(masm);
        switch (op_) {
          case Token::ADD: __ addsd(xmm0, xmm1); break;
          case Token::SUB: __ subsd(xmm0, xmm1); break;
          case Token::MUL: __ mulsd(xmm0, xmm1); break;
          case Token::DIV: __ divsd(xmm0, xmm1); break;
          default: UNREACHABLE();
        }
      }
      __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0);
      __ movq(rax, rcx);
      __ ret(0);
    } else {
      __ jmp(&fail);
    }
  }

  // Non-smi operands reach the end of the code generated by
  // GenerateSmiCode, and fall through to subsequent code,
  // with the operands in rdx and rax.
  // But first we check if non-smi values are HeapNumbers holding
  // values that could be smi.
  __ bind(&not_smis);
  Comment done_comment(masm, "-- Enter non-smi code");
  FloatingPointHelper::NumbersToSmis(masm, left, right, rbx, rdi, rcx,
                                     &smi_values, &fail);
  __ jmp(&smi_values);
  __ bind(&fail);
}


void BinaryOpStub::GenerateFloatingPointCode(MacroAssembler* masm,
                                             Label* allocation_failure,
                                             Label* non_numeric_failure) {
  switch (op_) {
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV: {
      FloatingPointHelper::LoadSSE2UnknownOperands(masm, non_numeric_failure);

      switch (op_) {
        case Token::ADD: __ addsd(xmm0, xmm1); break;
        case Token::SUB: __ subsd(xmm0, xmm1); break;
        case Token::MUL: __ mulsd(xmm0, xmm1); break;
        case Token::DIV: __ divsd(xmm0, xmm1); break;
        default: UNREACHABLE();
      }
      GenerateHeapResultAllocation(masm, allocation_failure);
      __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0);
      __ ret(0);
      break;
    }
    case Token::MOD: {
      // For MOD we jump to the allocation_failure label, to call runtime.
      __ jmp(allocation_failure);
      break;
    }
    case Token::BIT_OR:
    case Token::BIT_AND:
    case Token::BIT_XOR:
    case Token::SAR:
    case Token::SHL:
    case Token::SHR: {
      Label non_smi_shr_result;
      Register heap_number_map = r9;
      __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
      FloatingPointHelper::LoadAsIntegers(masm, non_numeric_failure,
                                          heap_number_map);
      switch (op_) {
        case Token::BIT_OR: __ orl(rax, rcx); break;
        case Token::BIT_AND: __ andl(rax, rcx); break;
        case Token::BIT_XOR: __ xorl(rax, rcx); break;
        case Token::SAR: __ sarl_cl(rax); break;
        case Token::SHL: __ shll_cl(rax); break;
        case Token::SHR: {
          __ shrl_cl(rax);
          // Check if result is negative. This can only happen for a shift
          // by zero.
          __ testl(rax, rax);
          __ j(negative, &non_smi_shr_result);
          break;
        }
        default: UNREACHABLE();
      }
      STATIC_ASSERT(kSmiValueSize == 32);
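      // With 32-bit smi payloads, every int32 fits in a smi, so no overflow
      // check is needed before tagging the result.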
      // Tag smi result and return.
      __ Integer32ToSmi(rax, rax);
      __ Ret();

      // Logical shift right can produce an unsigned int32 that is not
      // an int32, and so is not in the smi range. Allocate a heap number
      // in that case.
      if (op_ == Token::SHR) {
        __ bind(&non_smi_shr_result);
        Label allocation_failed;
        __ movl(rbx, rax);  // rbx holds result value (uint32 value as int64).
        // Allocate heap number in new space.
        // Not using AllocateHeapNumber macro in order to reuse
        // already loaded heap_number_map.
        __ AllocateInNewSpace(HeapNumber::kSize,
                              rax,
                              rdx,
                              no_reg,
                              &allocation_failed,
                              TAG_OBJECT);
        // Set the map.
        if (FLAG_debug_code) {
          __ AbortIfNotRootValue(heap_number_map,
                                 Heap::kHeapNumberMapRootIndex,
                                 "HeapNumberMap register clobbered.");
        }
        __ movq(FieldOperand(rax, HeapObject::kMapOffset),
                heap_number_map);
        __ cvtqsi2sd(xmm0, rbx);
        __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0);
        __ Ret();

        __ bind(&allocation_failed);
        // We need tagged values in rdx and rax for the following code,
        // not int32 in rax and rcx.
        __ Integer32ToSmi(rax, rcx);
        __ Integer32ToSmi(rdx, rbx);
        __ jmp(allocation_failure);
      }
      break;
    }
    default: UNREACHABLE(); break;
  }
  // No fall-through from this generated code.
  if (FLAG_debug_code) {
    __ Abort("Unexpected fall-through in "
             "BinaryOpStub::GenerateFloatingPointCode.");
  }
}


void BinaryOpStub::GenerateStringAddCode(MacroAssembler* masm) {
  ASSERT(op_ == Token::ADD);
  Label left_not_string, call_runtime;

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;

  // Test if left operand is a string.
  __ JumpIfSmi(left, &left_not_string, Label::kNear);
  __ CmpObjectType(left, FIRST_NONSTRING_TYPE, rcx);
  __ j(above_equal, &left_not_string, Label::kNear);
  StringAddStub string_add_left_stub(NO_STRING_CHECK_LEFT_IN_STUB);
  GenerateRegisterArgsPush(masm);
  __ TailCallStub(&string_add_left_stub);

  // Left operand is not a string, test right.
  __ bind(&left_not_string);
  __ JumpIfSmi(right, &call_runtime, Label::kNear);
  __ CmpObjectType(right, FIRST_NONSTRING_TYPE, rcx);
  __ j(above_equal, &call_runtime, Label::kNear);

  StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB);
  GenerateRegisterArgsPush(masm);
  __ TailCallStub(&string_add_right_stub);

  // Neither argument is a string.
  __ bind(&call_runtime);
}


void BinaryOpStub::GenerateCallRuntimeCode(MacroAssembler* masm) {
  GenerateRegisterArgsPush(masm);
  switch (op_) {
    case Token::ADD:
      __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
      break;
    case Token::SUB:
      __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
      break;
    case Token::MUL:
      __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
      break;
    case Token::DIV:
      __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
      break;
    case Token::MOD:
      __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
      break;
    case Token::BIT_OR:
      __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
      break;
    case Token::BIT_AND:
      __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
      break;
    case Token::BIT_XOR:
      __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
      break;
    case Token::SAR:
      __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
      break;
    case Token::SHL:
      __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
      break;
    case Token::SHR:
      __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}


void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
  Label call_runtime;
  if (result_type_ == BinaryOpIC::UNINITIALIZED ||
      result_type_ == BinaryOpIC::SMI) {
    // Only allow smi results.
    GenerateSmiCode(masm, NULL, NO_HEAPNUMBER_RESULTS);
  } else {
    // Allow heap number result and don't make a transition if a heap number
    // cannot be allocated.
    GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);
  }

  // Code falls through if the result is not returned as either a smi or heap
  // number.
  GenerateTypeTransition(masm);

  if (call_runtime.is_linked()) {
    __ bind(&call_runtime);
    GenerateCallRuntimeCode(masm);
  }
}


void BinaryOpStub::GenerateStringStub(MacroAssembler* masm) {
  ASSERT(operands_type_ == BinaryOpIC::STRING);
  ASSERT(op_ == Token::ADD);
  GenerateStringAddCode(masm);
  // Try to add arguments as strings, otherwise, transition to the generic
  // BinaryOpIC type.
  GenerateTypeTransition(masm);
}


void BinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) {
  Label call_runtime;
  ASSERT(operands_type_ == BinaryOpIC::BOTH_STRING);
  ASSERT(op_ == Token::ADD);
  // If both arguments are strings, call the string add stub.
  // Otherwise, do a transition.

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;

  // Test if left operand is a string.
  __ JumpIfSmi(left, &call_runtime);
  __ CmpObjectType(left, FIRST_NONSTRING_TYPE, rcx);
  __ j(above_equal, &call_runtime);

  // Test if right operand is a string.
  __ JumpIfSmi(right, &call_runtime);
  __ CmpObjectType(right, FIRST_NONSTRING_TYPE, rcx);
  __ j(above_equal, &call_runtime);

  StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
  GenerateRegisterArgsPush(masm);
  __ TailCallStub(&string_add_stub);

  __ bind(&call_runtime);
  GenerateTypeTransition(masm);
}


void BinaryOpStub::GenerateOddballStub(MacroAssembler* masm) {
  Label call_runtime;

  if (op_ == Token::ADD) {
    // Handle string addition here, because it is the only operation
    // that does not do a ToNumber conversion on the operands.
    GenerateStringAddCode(masm);
  }

  // Convert oddball arguments to numbers.
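  // ToNumber(undefined) is NaN, but the bitwise operators use ToInt32, under
  // which NaN becomes 0, so for them 0 can be loaded directly.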
  Label check, done;
  __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, &check, Label::kNear);
  if (Token::IsBitOp(op_)) {
    __ xor_(rdx, rdx);
  } else {
    __ LoadRoot(rdx, Heap::kNanValueRootIndex);
  }
  __ jmp(&done, Label::kNear);
  __ bind(&check);
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, &done, Label::kNear);
  if (Token::IsBitOp(op_)) {
    __ xor_(rax, rax);
  } else {
    __ LoadRoot(rax, Heap::kNanValueRootIndex);
  }
  __ bind(&done);

  GenerateHeapNumberStub(masm);
}


void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
  Label gc_required, not_number;
  GenerateFloatingPointCode(masm, &gc_required, &not_number);

  __ bind(&not_number);
  GenerateTypeTransition(masm);

  __ bind(&gc_required);
  GenerateCallRuntimeCode(masm);
}


void BinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
  Label call_runtime, call_string_add_or_runtime;

  GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);

  GenerateFloatingPointCode(masm, &call_runtime, &call_string_add_or_runtime);

  __ bind(&call_string_add_or_runtime);
  if (op_ == Token::ADD) {
    GenerateStringAddCode(masm);
  }

  __ bind(&call_runtime);
  GenerateCallRuntimeCode(masm);
}


void BinaryOpStub::GenerateHeapResultAllocation(MacroAssembler* masm,
                                                Label* alloc_failure) {
  Label skip_allocation;
  OverwriteMode mode = mode_;
  switch (mode) {
    case OVERWRITE_LEFT: {
      // If the argument in rdx is already an object, we skip the
      // allocation of a heap number.
      __ JumpIfNotSmi(rdx, &skip_allocation);
      // Allocate a heap number for the result. Keep rax and rdx intact
      // for the possible runtime call.
      __ AllocateHeapNumber(rbx, rcx, alloc_failure);
      // Now rdx can be overwritten losing one of the arguments as we are
      // now done and will not need it any more.
      __ movq(rdx, rbx);
      __ bind(&skip_allocation);
      // Use object in rdx as a result holder
      __ movq(rax, rdx);
      break;
    }
    case OVERWRITE_RIGHT:
      // If the argument in rax is already an object, we skip the
      // allocation of a heap number.
      __ JumpIfNotSmi(rax, &skip_allocation);
      // Fall through!
    case NO_OVERWRITE:
      // Allocate a heap number for the result. Keep rax and rdx intact
      // for the possible runtime call.
      __ AllocateHeapNumber(rbx, rcx, alloc_failure);
      // Now rax can be overwritten losing one of the arguments as we are
      // now done and will not need it any more.
      __ movq(rax, rbx);
      __ bind(&skip_allocation);
      break;
    default: UNREACHABLE();
  }
}


void BinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
  __ pop(rcx);
  __ push(rdx);
  __ push(rax);
  __ push(rcx);
}


void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
  // TAGGED case:
  //   Input:
  //     rsp[8]: argument (should be number).
  //     rsp[0]: return address.
  //   Output:
  //     rax: tagged double result.
  // UNTAGGED case:
  //   Input:
  //     rsp[0]: return address.
  //     xmm1: untagged double input argument
  //   Output:
  //     xmm1: untagged double result.

  Label runtime_call;
  Label runtime_call_clear_stack;
  Label skip_cache;
  const bool tagged = (argument_type_ == TAGGED);
  if (tagged) {
    Label input_not_smi, loaded;
    // Test that rax is a number.
    __ movq(rax, Operand(rsp, kPointerSize));
    __ JumpIfNotSmi(rax, &input_not_smi, Label::kNear);
    // Input is a smi. Untag and load it onto the FPU stack.
    // Then load the bits of the double into rbx.
    __ SmiToInteger32(rax, rax);
    __ subq(rsp, Immediate(kDoubleSize));
    __ cvtlsi2sd(xmm1, rax);
    __ movsd(Operand(rsp, 0), xmm1);
    __ movq(rbx, xmm1);
    __ movq(rdx, xmm1);
    __ fld_d(Operand(rsp, 0));
    __ addq(rsp, Immediate(kDoubleSize));
    __ jmp(&loaded, Label::kNear);

    __ bind(&input_not_smi);
    // Check if input is a HeapNumber.
    __ LoadRoot(rbx, Heap::kHeapNumberMapRootIndex);
    __ cmpq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
    __ j(not_equal, &runtime_call);
    // Input is a HeapNumber. Push it on the FPU stack and load its
    // bits into rbx.
    __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset));
    __ movq(rbx, FieldOperand(rax, HeapNumber::kValueOffset));
    __ movq(rdx, rbx);

    __ bind(&loaded);
  } else {  // UNTAGGED.
    __ movq(rbx, xmm1);
    __ movq(rdx, xmm1);
  }

  // ST[0] == double value, if TAGGED.
  // rbx = bits of double value.
  // rdx = also bits of double value.
  // Compute hash (h is 32 bits, bits are 64 and the shifts are arithmetic):
  //   h = h0 = bits ^ (bits >> 32);
  //   h ^= h >> 16;
  //   h ^= h >> 8;
  //   h = h & (cacheSize - 1);
  // or h = (h0 ^ (h0 >> 8) ^ (h0 >> 16) ^ (h0 >> 24)) & (cacheSize - 1)
  __ sar(rdx, Immediate(32));
  __ xorl(rdx, rbx);
  __ movl(rcx, rdx);
  __ movl(rax, rdx);
  __ movl(rdi, rdx);
  __ sarl(rdx, Immediate(8));
  __ sarl(rcx, Immediate(16));
  __ sarl(rax, Immediate(24));
  __ xorl(rcx, rdx);
  __ xorl(rax, rdi);
  __ xorl(rcx, rax);
  ASSERT(IsPowerOf2(TranscendentalCache::SubCache::kCacheSize));
  __ andl(rcx, Immediate(TranscendentalCache::SubCache::kCacheSize - 1));

  // ST[0] == double value.
  // rbx = bits of double value.
  // rcx = TranscendentalCache::hash(double value).
  ExternalReference cache_array =
      ExternalReference::transcendental_cache_array_address(masm->isolate());
  __ movq(rax, cache_array);
  int cache_array_index =
      type_ * sizeof(Isolate::Current()->transcendental_cache()->caches_[0]);
  __ movq(rax, Operand(rax, cache_array_index));
  // rax points to the cache for the type type_.
  // If NULL, the cache hasn't been initialized yet, so go through runtime.
  __ testq(rax, rax);
  __ j(zero, &runtime_call_clear_stack);  // Only clears stack if TAGGED.
#ifdef DEBUG
  // Check that the layout of cache elements matches expectations.
  {  // NOLINT - doesn't like a single brace on a line.
    TranscendentalCache::SubCache::Element test_elem[2];
    char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
    char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
    char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
    char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
    char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
    // Two uint_32's and a pointer per element.
    CHECK_EQ(16, static_cast<int>(elem2_start - elem_start));
    CHECK_EQ(0, static_cast<int>(elem_in0 - elem_start));
    CHECK_EQ(kIntSize, static_cast<int>(elem_in1 - elem_start));
    CHECK_EQ(2 * kIntSize, static_cast<int>(elem_out - elem_start));
  }
#endif
  // Find the address of the rcx'th entry in the cache, i.e., &rax[rcx*16].
  __ addl(rcx, rcx);
  __ lea(rcx, Operand(rax, rcx, times_8, 0));
  // Check if cache matches: Double value is stored in uint32_t[2] array.
  Label cache_miss;
  __ cmpq(rbx, Operand(rcx, 0));
  __ j(not_equal, &cache_miss, Label::kNear);
  // Cache hit!
  __ movq(rax, Operand(rcx, 2 * kIntSize));
  if (tagged) {
    __ fstp(0);  // Clear FPU stack.
    __ ret(kPointerSize);
  } else {  // UNTAGGED.
    __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
    __ Ret();
  }

  __ bind(&cache_miss);
  // Update cache with new value.
  if (tagged) {
    __ AllocateHeapNumber(rax, rdi, &runtime_call_clear_stack);
  } else {  // UNTAGGED.
    __ AllocateHeapNumber(rax, rdi, &skip_cache);
    __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1);
    __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset));
  }
  GenerateOperation(masm);
  __ movq(Operand(rcx, 0), rbx);
  __ movq(Operand(rcx, 2 * kIntSize), rax);
  __ fstp_d(FieldOperand(rax, HeapNumber::kValueOffset));
  if (tagged) {
    __ ret(kPointerSize);
  } else {  // UNTAGGED.
    __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
    __ Ret();

    // Skip cache and return answer directly, only in untagged case.
    __ bind(&skip_cache);
    __ subq(rsp, Immediate(kDoubleSize));
    __ movsd(Operand(rsp, 0), xmm1);
    __ fld_d(Operand(rsp, 0));
    GenerateOperation(masm);
    __ fstp_d(Operand(rsp, 0));
    __ movsd(xmm1, Operand(rsp, 0));
    __ addq(rsp, Immediate(kDoubleSize));
    // We return the value in xmm1 without adding it to the cache, but
    // we cause a scavenging GC so that future allocations will succeed.
    __ EnterInternalFrame();
    // Allocate an unused object bigger than a HeapNumber.
    __ Push(Smi::FromInt(2 * kDoubleSize));
    __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
    __ LeaveInternalFrame();
    __ Ret();
  }

  // Call runtime, doing whatever allocation and cleanup is necessary.
  if (tagged) {
    __ bind(&runtime_call_clear_stack);
    __ fstp(0);
    __ bind(&runtime_call);
    __ TailCallExternalReference(
        ExternalReference(RuntimeFunction(), masm->isolate()), 1, 1);
  } else {  // UNTAGGED.
    __ bind(&runtime_call_clear_stack);
    __ bind(&runtime_call);
    __ AllocateHeapNumber(rax, rdi, &skip_cache);
    __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1);
    __ EnterInternalFrame();
    __ push(rax);
    __ CallRuntime(RuntimeFunction(), 1);
    __ LeaveInternalFrame();
    __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
    __ Ret();
  }
}
1450
1451
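// Editorial sketch (not referenced by the stubs; the helper name is
// hypothetical): the entry addressing above, written as C++. It assumes the
// 16-byte Element layout that the DEBUG block verifies: two uint32_t input
// words followed by a pointer-sized output field on x64. Doubling the hash
// and scaling by 8 is just hash * 16.
static inline char* ExampleTranscendentalCacheEntry(char* cache_base,
                                                    int hash) {
  const int kElementSize = 2 * sizeof(uint32_t) + sizeof(void*);  // 16 bytes.
  return cache_base + hash * kElementSize;  // lea(rcx, [rax + (2*rcx)*8]).
}

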
Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
  switch (type_) {
    // Add more cases when necessary.
    case TranscendentalCache::SIN: return Runtime::kMath_sin;
    case TranscendentalCache::COS: return Runtime::kMath_cos;
    case TranscendentalCache::LOG: return Runtime::kMath_log;
    default:
      UNIMPLEMENTED();
      return Runtime::kAbort;
  }
}


void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
  // Registers:
  // rax: Newly allocated HeapNumber, which must be preserved.
  // rbx: Bits of input double. Must be preserved.
  // rcx: Pointer to cache entry. Must be preserved.
  // st(0): Input double.
  Label done;
  if (type_ == TranscendentalCache::SIN || type_ == TranscendentalCache::COS) {
    // Both fsin and fcos require arguments in the range +/-2^63 and
    // return NaN for infinities and NaN. They can share all code except
    // the actual fsin/fcos operation.
    Label in_range;
    // If the argument is outside the range -2^63..2^63, fsin/fcos doesn't
    // work. We must reduce it to the appropriate range.
    __ movq(rdi, rbx);
    // Move exponent and sign bits to low bits.
    __ shr(rdi, Immediate(HeapNumber::kMantissaBits));
    // Remove sign bit.
    __ andl(rdi, Immediate((1 << HeapNumber::kExponentBits) - 1));
    int supported_exponent_limit = (63 + HeapNumber::kExponentBias);
    __ cmpl(rdi, Immediate(supported_exponent_limit));
    __ j(below, &in_range);
    // Check for infinity and NaN. Both return NaN for sin.
    __ cmpl(rdi, Immediate(0x7ff));
    Label non_nan_result;
    __ j(not_equal, &non_nan_result, Label::kNear);
    // Input is +/-Infinity or NaN. Result is NaN.
    __ fstp(0);
    __ LoadRoot(kScratchRegister, Heap::kNanValueRootIndex);
    __ fld_d(FieldOperand(kScratchRegister, HeapNumber::kValueOffset));
    __ jmp(&done);

    __ bind(&non_nan_result);

    // Use fprem1 to restrict the argument to the range +/-2*PI.
    __ movq(rdi, rax);  // Save rax before using fnstsw_ax.
    __ fldpi();
    __ fadd(0);
    __ fld(1);
    // FPU Stack: input, 2*pi, input.
    {
      Label no_exceptions;
      __ fwait();
      __ fnstsw_ax();
      // Clear if Illegal Operand or Zero Division exceptions are set.
      __ testl(rax, Immediate(5));  // #IO and #ZD flags of FPU status word.
      __ j(zero, &no_exceptions);
      __ fnclex();
      __ bind(&no_exceptions);
    }

    // Compute st(0) % st(1)
    {
      Label partial_remainder_loop;
      __ bind(&partial_remainder_loop);
      __ fprem1();
      __ fwait();
      __ fnstsw_ax();
      __ testl(rax, Immediate(0x400));  // Check C2 bit of FPU status word.
      // If C2 is set, computation only has partial result. Loop to
      // continue computation.
      __ j(not_zero, &partial_remainder_loop);
    }
    // FPU Stack: input, 2*pi, input % 2*pi
    __ fstp(2);
    // FPU Stack: input % 2*pi, 2*pi,
    __ fstp(0);
    // FPU Stack: input % 2*pi
    __ movq(rax, rdi);  // Restore rax, pointer to the new HeapNumber.
    __ bind(&in_range);
    switch (type_) {
      case TranscendentalCache::SIN:
        __ fsin();
        break;
      case TranscendentalCache::COS:
        __ fcos();
        break;
      default:
        UNREACHABLE();
    }
    __ bind(&done);
  } else {
    ASSERT(type_ == TranscendentalCache::LOG);
    __ fldln2();
    __ fxch();
    __ fyl2x();
  }
}


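// Editorial sketch (illustrative only, hypothetical name): the range check
// above, on the raw IEEE-754 bits. The constants 52 and 1023 correspond to
// HeapNumber::kMantissaBits and HeapNumber::kExponentBias.
static inline bool ExampleNeedsArgumentReduction(uint64_t double_bits) {
  // Drop the 52 mantissa bits and mask off the sign bit to keep the 11-bit
  // biased exponent, exactly what the shr/andl pair computes in rdi.
  int biased_exponent = static_cast<int>((double_bits >> 52) & 0x7FF);
  // An exponent of 63 + bias or more means |x| >= 2^63, where fsin/fcos are
  // undefined (the all-ones value 0x7FF additionally flags Infinity/NaN).
  return biased_exponent >= 63 + 1023;
}

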
// Input: rdx, rax are the left and right objects of a bit op.
// Output: rax, rcx are left and right integers for a bit op.
void FloatingPointHelper::LoadNumbersAsIntegers(MacroAssembler* masm) {
  // Check float operands.
  Label done;
  Label rax_is_smi;
  Label rax_is_object;
  Label rdx_is_object;

  __ JumpIfNotSmi(rdx, &rdx_is_object);
  __ SmiToInteger32(rdx, rdx);
  __ JumpIfSmi(rax, &rax_is_smi);

  __ bind(&rax_is_object);
  IntegerConvert(masm, rcx, rax);  // Uses rdi, rcx and rbx.
  __ jmp(&done);

  __ bind(&rdx_is_object);
  IntegerConvert(masm, rdx, rdx);  // Uses rdi, rcx and rbx.
  __ JumpIfNotSmi(rax, &rax_is_object);
  __ bind(&rax_is_smi);
  __ SmiToInteger32(rcx, rax);

  __ bind(&done);
  __ movl(rax, rdx);
}


// Input: rdx, rax are the left and right objects of a bit op.
// Output: rax, rcx are left and right integers for a bit op.
// Jump to conversion_failure: rdx and rax are unchanged.
void FloatingPointHelper::LoadAsIntegers(MacroAssembler* masm,
                                         Label* conversion_failure,
                                         Register heap_number_map) {
  // Check float operands.
  Label arg1_is_object, check_undefined_arg1;
  Label arg2_is_object, check_undefined_arg2;
  Label load_arg2, done;

  __ JumpIfNotSmi(rdx, &arg1_is_object);
  __ SmiToInteger32(r8, rdx);
  __ jmp(&load_arg2);

  // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
  __ bind(&check_undefined_arg1);
  __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, conversion_failure);
  __ Set(r8, 0);
  __ jmp(&load_arg2);

  __ bind(&arg1_is_object);
  __ cmpq(FieldOperand(rdx, HeapObject::kMapOffset), heap_number_map);
  __ j(not_equal, &check_undefined_arg1);
  // Get the untagged integer version of the rdx heap number in r8.
  IntegerConvert(masm, r8, rdx);

  // Here r8 has the untagged integer, rax has a Smi or a heap number.
  __ bind(&load_arg2);
  // Test if arg2 is a Smi.
  __ JumpIfNotSmi(rax, &arg2_is_object);
  __ SmiToInteger32(rcx, rax);
  __ jmp(&done);

  // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
  __ bind(&check_undefined_arg2);
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, conversion_failure);
  __ Set(rcx, 0);
  __ jmp(&done);

  __ bind(&arg2_is_object);
  __ cmpq(FieldOperand(rax, HeapObject::kMapOffset), heap_number_map);
  __ j(not_equal, &check_undefined_arg2);
  // Get the untagged integer version of the rax heap number in rcx.
  IntegerConvert(masm, rcx, rax);
  __ bind(&done);
  __ movl(rax, r8);
}


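// Editorial sketch (spec semantics only, assuming <cmath> is available via
// the umbrella header; the name is hypothetical): what IntegerConvert
// implements for heap numbers. The real helper uses bit manipulation rather
// than floating-point library calls.
static inline int32_t ExampleToInt32(double value) {
  // ToInt32 (ECMA-262, section 9.5): truncate toward zero, wrap modulo 2^32,
  // then reinterpret as signed. NaN and Infinity map to 0 and are assumed to
  // be handled before this point.
  double truncated = value < 0 ? ceil(value) : floor(value);
  double wrapped = fmod(truncated, 4294967296.0);  // 2^32.
  if (wrapped < 0) wrapped += 4294967296.0;
  return static_cast<int32_t>(static_cast<uint32_t>(wrapped));
}

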
void FloatingPointHelper::LoadSSE2SmiOperands(MacroAssembler* masm) {
  __ SmiToInteger32(kScratchRegister, rdx);
  __ cvtlsi2sd(xmm0, kScratchRegister);
  __ SmiToInteger32(kScratchRegister, rax);
  __ cvtlsi2sd(xmm1, kScratchRegister);
}


void FloatingPointHelper::LoadSSE2NumberOperands(MacroAssembler* masm) {
  Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, done;
  // Load operand in rdx into xmm0.
  __ JumpIfSmi(rdx, &load_smi_rdx);
  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  // Load operand in rax into xmm1.
  __ JumpIfSmi(rax, &load_smi_rax);
  __ bind(&load_nonsmi_rax);
  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi_rdx);
  __ SmiToInteger32(kScratchRegister, rdx);
  __ cvtlsi2sd(xmm0, kScratchRegister);
  __ JumpIfNotSmi(rax, &load_nonsmi_rax);

  __ bind(&load_smi_rax);
  __ SmiToInteger32(kScratchRegister, rax);
  __ cvtlsi2sd(xmm1, kScratchRegister);

  __ bind(&done);
}


void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm,
                                                  Label* not_numbers) {
  Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done;
  // Load operand in rdx into xmm0, or branch to not_numbers.
  __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex);
  __ JumpIfSmi(rdx, &load_smi_rdx);
  __ cmpq(FieldOperand(rdx, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);  // Argument in rdx is not a number.
  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  // Load operand in rax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(rax, &load_smi_rax);

  __ bind(&load_nonsmi_rax);
  __ cmpq(FieldOperand(rax, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);
  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi_rdx);
  __ SmiToInteger32(kScratchRegister, rdx);
  __ cvtlsi2sd(xmm0, kScratchRegister);
  __ JumpIfNotSmi(rax, &load_nonsmi_rax);

  __ bind(&load_smi_rax);
  __ SmiToInteger32(kScratchRegister, rax);
  __ cvtlsi2sd(xmm1, kScratchRegister);
  __ bind(&done);
}


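// Editorial sketch (illustrative, hypothetical name): the contract shared by
// the three loaders above. The left operand (rdx) always ends up in xmm0 and
// the right operand (rax) in xmm1, whether it arrives as a smi or as a heap
// number.
static inline double ExampleLoadOperandAsDouble(bool is_smi,
                                                int32_t smi_value,
                                                double heap_number_value) {
  // Smis go through SmiToInteger32 + cvtlsi2sd; heap numbers are read with a
  // single movsd from their value field.
  return is_smi ? static_cast<double>(smi_value) : heap_number_value;
}

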
void FloatingPointHelper::NumbersToSmis(MacroAssembler* masm,
                                        Register first,
                                        Register second,
                                        Register scratch1,
                                        Register scratch2,
                                        Register scratch3,
                                        Label* on_success,
                                        Label* on_not_smis) {
  Register heap_number_map = scratch3;
  Register smi_result = scratch1;
  Label done;

  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);

  Label first_smi;
  __ JumpIfSmi(first, &first_smi, Label::kNear);
  __ cmpq(FieldOperand(first, HeapObject::kMapOffset), heap_number_map);
  __ j(not_equal, on_not_smis);
  // Convert HeapNumber to smi if possible.
  __ movsd(xmm0, FieldOperand(first, HeapNumber::kValueOffset));
  __ movq(scratch2, xmm0);
  __ cvttsd2siq(smi_result, xmm0);
  // Check if conversion was successful by converting back and
  // comparing to the original double's bits.
  __ cvtlsi2sd(xmm1, smi_result);
  __ movq(kScratchRegister, xmm1);
  __ cmpq(scratch2, kScratchRegister);
  __ j(not_equal, on_not_smis);
  __ Integer32ToSmi(first, smi_result);

  __ JumpIfSmi(second, (on_success != NULL) ? on_success : &done);
  __ bind(&first_smi);
  if (FLAG_debug_code) {
    // Second should be non-smi if we get here.
    __ AbortIfSmi(second);
  }
  __ cmpq(FieldOperand(second, HeapObject::kMapOffset), heap_number_map);
  __ j(not_equal, on_not_smis);
  // Convert second to smi, if possible.
  __ movsd(xmm0, FieldOperand(second, HeapNumber::kValueOffset));
  __ movq(scratch2, xmm0);
  __ cvttsd2siq(smi_result, xmm0);
  __ cvtlsi2sd(xmm1, smi_result);
  __ movq(kScratchRegister, xmm1);
  __ cmpq(scratch2, kScratchRegister);
  __ j(not_equal, on_not_smis);
  __ Integer32ToSmi(second, smi_result);
  if (on_success != NULL) {
    __ jmp(on_success);
  } else {
    __ bind(&done);
  }
}


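// Editorial sketch (not used by the stubs; memcpy assumed available via the
// umbrella header; the name is hypothetical): the "convert back and compare
// bits" test above. The C++ cast is undefined for out-of-range input, where
// cvttsd2siq instead yields the 0x8000000000000000 sentinel, so this sketch
// assumes in-range values.
static inline bool ExampleDoubleIsExactInt64(double value, int64_t* out) {
  int64_t truncated = static_cast<int64_t>(value);     // Like cvttsd2siq.
  double round_trip = static_cast<double>(truncated);  // Like cvtlsi2sd.
  uint64_t original_bits, round_trip_bits;
  memcpy(&original_bits, &value, sizeof(value));
  memcpy(&round_trip_bits, &round_trip, sizeof(round_trip));
  // Equal bit patterns mean the double held an exactly representable
  // integer; this also rejects -0.0, whose bits differ from 0's.
  if (original_bits != round_trip_bits) return false;
  *out = truncated;
  return true;
}

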
void MathPowStub::Generate(MacroAssembler* masm) {
  // Registers are used as follows:
  // rdx = base
  // rax = exponent
  // rcx = temporary, result

  Label allocate_return, call_runtime;

  // Load input parameters.
  __ movq(rdx, Operand(rsp, 2 * kPointerSize));
  __ movq(rax, Operand(rsp, 1 * kPointerSize));

  // Save 1 in xmm3 - we need this several times later on.
  __ Set(rcx, 1);
  __ cvtlsi2sd(xmm3, rcx);

  Label exponent_nonsmi;
  Label base_nonsmi;
  // If the exponent is a heap number go to that specific case.
  __ JumpIfNotSmi(rax, &exponent_nonsmi);
  __ JumpIfNotSmi(rdx, &base_nonsmi);

  // Optimized version when both exponent and base are smis.
  Label powi;
  __ SmiToInteger32(rdx, rdx);
  __ cvtlsi2sd(xmm0, rdx);
  __ jmp(&powi);
  // Exponent is a smi and base is a heap number.
  __ bind(&base_nonsmi);
  __ CompareRoot(FieldOperand(rdx, HeapObject::kMapOffset),
                 Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &call_runtime);

  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));

  // Optimized version of pow if exponent is a smi.
  // xmm0 contains the base.
  __ bind(&powi);
  __ SmiToInteger32(rax, rax);

  // Save exponent in base as we need to check if exponent is negative later.
  // We know that base and exponent are in different registers.
  __ movq(rdx, rax);

  // Get absolute value of exponent.
  Label no_neg;
  __ cmpl(rax, Immediate(0));
  __ j(greater_equal, &no_neg, Label::kNear);
  __ negl(rax);
  __ bind(&no_neg);

  // Load xmm1 with 1.
  __ movaps(xmm1, xmm3);
  Label while_true;
  Label no_multiply;

  __ bind(&while_true);
  __ shrl(rax, Immediate(1));
  __ j(not_carry, &no_multiply, Label::kNear);
  __ mulsd(xmm1, xmm0);
  __ bind(&no_multiply);
  __ mulsd(xmm0, xmm0);
  __ j(not_zero, &while_true);

  // Base has the original value of the exponent - if the exponent is
  // negative return 1/result.
  __ testl(rdx, rdx);
  __ j(positive, &allocate_return);
  // Special case if xmm1 has reached infinity.
  __ divsd(xmm3, xmm1);
  __ movaps(xmm1, xmm3);
  __ xorps(xmm0, xmm0);
  __ ucomisd(xmm0, xmm1);
  __ j(equal, &call_runtime);

  __ jmp(&allocate_return);

  // Exponent (or both) is a heap number - no matter what we should now work
  // on doubles.
  __ bind(&exponent_nonsmi);
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &call_runtime);
  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  // Test if exponent is NaN.
  __ ucomisd(xmm1, xmm1);
  __ j(parity_even, &call_runtime);

  Label base_not_smi, handle_special_cases;
  __ JumpIfNotSmi(rdx, &base_not_smi, Label::kNear);
  __ SmiToInteger32(rdx, rdx);
  __ cvtlsi2sd(xmm0, rdx);
  __ jmp(&handle_special_cases, Label::kNear);

  __ bind(&base_not_smi);
  __ CompareRoot(FieldOperand(rdx, HeapObject::kMapOffset),
                 Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &call_runtime);
  __ movl(rcx, FieldOperand(rdx, HeapNumber::kExponentOffset));
  __ andl(rcx, Immediate(HeapNumber::kExponentMask));
  __ cmpl(rcx, Immediate(HeapNumber::kExponentMask));
  // base is NaN or +/-Infinity
  __ j(greater_equal, &call_runtime);
  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));

  // base is in xmm0 and exponent is in xmm1.
  __ bind(&handle_special_cases);
  Label not_minus_half;
  // Test for -0.5.
  // Load xmm2 with -0.5.
  __ movq(rcx, V8_UINT64_C(0xBFE0000000000000), RelocInfo::NONE);
  __ movq(xmm2, rcx);
  // xmm2 now has -0.5.
  __ ucomisd(xmm2, xmm1);
  __ j(not_equal, &not_minus_half, Label::kNear);

  // Calculates reciprocal of square root.
  // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
  __ xorps(xmm1, xmm1);
  __ addsd(xmm1, xmm0);
  __ sqrtsd(xmm1, xmm1);
  __ divsd(xmm3, xmm1);
  __ movaps(xmm1, xmm3);
  __ jmp(&allocate_return);

  // Test for 0.5.
  __ bind(&not_minus_half);
  // Load xmm2 with 0.5.
  // Since xmm3 is 1 and xmm2 is -0.5 this is simply xmm2 + xmm3.
  __ addsd(xmm2, xmm3);
  // xmm2 now has 0.5.
  __ ucomisd(xmm2, xmm1);
  __ j(not_equal, &call_runtime);
  // Calculates square root.
  // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
  __ xorps(xmm1, xmm1);
  __ addsd(xmm1, xmm0);  // Convert -0 to 0.
  __ sqrtsd(xmm1, xmm1);

  __ bind(&allocate_return);
  __ AllocateHeapNumber(rcx, rax, &call_runtime);
  __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm1);
  __ movq(rax, rcx);
  __ ret(2 * kPointerSize);

  __ bind(&call_runtime);
  __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
}


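// Editorial sketch (hypothetical name): the smi-exponent loop above is plain
// exponentiation by squaring, driven by the carry flag that shrl shifts out.
static inline double ExamplePowi(double base, int exponent) {
  uint32_t bits = static_cast<uint32_t>(exponent);
  if (exponent < 0) bits = ~bits + 1;  // Two's-complement negate, like negl.
  double result = 1.0;                 // xmm1 starts at 1.
  while (bits != 0) {
    if (bits & 1) result *= base;  // Taken when shrl sets the carry flag.
    base *= base;                  // mulsd(xmm0, xmm0) every iteration.
    bits >>= 1;
  }
  // A negative exponent returns the reciprocal; the stub additionally falls
  // back to the runtime when the squaring loop overflowed to infinity (the
  // reciprocal then compares equal to zero).
  return exponent < 0 ? 1.0 / result : result;
}

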
void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  // The key is in rdx and the parameter count is in rax.

  // The displacement is used for skipping the frame pointer on the
  // stack. It is the offset of the last parameter (if any) relative
  // to the frame pointer.
  static const int kDisplacement = 1 * kPointerSize;

  // Check that the key is a smi.
  Label slow;
  __ JumpIfNotSmi(rdx, &slow);

  // Check if the calling frame is an arguments adaptor frame. We look at the
  // context offset, and if the frame is not a regular one, then we find a
  // Smi instead of the context. We can't use SmiCompare here, because that
  // only works for comparing two smis.
  Label adaptor;
  __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor);

  // Check index against formal parameters count limit passed in
  // through register rax. Use unsigned comparison to get negative
  // check for free.
  __ cmpq(rdx, rax);
  __ j(above_equal, &slow);

  // Read the argument from the stack and return it.
  SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
  __ lea(rbx, Operand(rbp, index.reg, index.scale, 0));
  index = masm->SmiToNegativeIndex(rdx, rdx, kPointerSizeLog2);
  __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement));
  __ Ret();

  // Arguments adaptor case: Check index against actual arguments
  // limit found in the arguments adaptor frame. Use unsigned
  // comparison to get negative check for free.
  __ bind(&adaptor);
  __ movq(rcx, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ cmpq(rdx, rcx);
  __ j(above_equal, &slow);

  // Read the argument from the stack and return it.
  index = masm->SmiToIndex(rax, rcx, kPointerSizeLog2);
  __ lea(rbx, Operand(rbx, index.reg, index.scale, 0));
  index = masm->SmiToNegativeIndex(rdx, rdx, kPointerSizeLog2);
  __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement));
  __ Ret();

  // Slow-case: Handle non-smi or out-of-bounds access to arguments
  // by calling the runtime system.
  __ bind(&slow);
  __ pop(rbx);  // Return address.
  __ push(rdx);
  __ push(rbx);
  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
}


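// Editorial sketch: the "unsigned comparison to get negative check for free"
// trick used twice above. Reinterpreting a signed index as unsigned makes
// every negative value larger than any valid length.
static inline bool ExampleIndexInBounds(int64_t index, int64_t length) {
  // One unsigned compare replaces "index >= 0 && index < length".
  return static_cast<uint64_t>(index) < static_cast<uint64_t>(length);
}

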
void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
  // rsp[0] : return address
  // rsp[8] : number of parameters
  // rsp[16] : receiver displacement
  // rsp[24] : function

  // The displacement is used for skipping the return address and the
  // frame pointer on the stack. It is the offset of the last
  // parameter (if any) relative to the frame pointer.
  static const int kDisplacement = 2 * kPointerSize;

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ Cmp(Operand(rdx, StandardFrameConstants::kContextOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor_frame);

  // Get the length from the frame.
  __ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize));
  __ jmp(&try_allocate);

  // Patch the arguments.length and the parameters pointer.
  __ bind(&adaptor_frame);
  __ SmiToInteger32(rcx,
                    Operand(rdx,
                            ArgumentsAdaptorFrameConstants::kLengthOffset));
  // Space on stack must already hold a smi.
  __ Integer32ToSmiField(Operand(rsp, 1 * kPointerSize), rcx);
  // Do not clobber the length index for the indexing operation since
  // it is used to compute the size for allocation later.
  __ lea(rdx, Operand(rdx, rcx, times_pointer_size, kDisplacement));
  __ movq(Operand(rsp, 2 * kPointerSize), rdx);

  // Try the new space allocation. Start out with computing the size of
  // the arguments object and the elements array.
  Label add_arguments_object;
  __ bind(&try_allocate);
  __ testl(rcx, rcx);
  __ j(zero, &add_arguments_object);
  __ leal(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize));
  __ bind(&add_arguments_object);
  __ addl(rcx, Immediate(GetArgumentsObjectSize()));

  // Do the allocation of both objects in one go.
  __ AllocateInNewSpace(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT);

  // Get the arguments boilerplate from the current (global) context.
  __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset));
  __ movq(rdi, Operand(rdi,
                       Context::SlotOffset(GetArgumentsBoilerplateIndex())));

  // Copy the JS object part.
  STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
  __ movq(kScratchRegister, FieldOperand(rdi, 0 * kPointerSize));
  __ movq(rdx, FieldOperand(rdi, 1 * kPointerSize));
  __ movq(rbx, FieldOperand(rdi, 2 * kPointerSize));
  __ movq(FieldOperand(rax, 0 * kPointerSize), kScratchRegister);
  __ movq(FieldOperand(rax, 1 * kPointerSize), rdx);
  __ movq(FieldOperand(rax, 2 * kPointerSize), rbx);

  if (type_ == NEW_NON_STRICT) {
    // Setup the callee in-object property.
    ASSERT(Heap::kArgumentsCalleeIndex == 1);
    __ movq(kScratchRegister, Operand(rsp, 3 * kPointerSize));
    __ movq(FieldOperand(rax, JSObject::kHeaderSize +
                              Heap::kArgumentsCalleeIndex * kPointerSize),
            kScratchRegister);
  }

  // Get the length (smi tagged) and set that as an in-object property too.
  ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ movq(rcx, Operand(rsp, 1 * kPointerSize));
  __ movq(FieldOperand(rax, JSObject::kHeaderSize +
                            Heap::kArgumentsLengthIndex * kPointerSize),
          rcx);

  // If there are no actual arguments, we're done.
  Label done;
  __ SmiTest(rcx);
  __ j(zero, &done);

  // Get the parameters pointer from the stack and untag the length.
  __ movq(rdx, Operand(rsp, 2 * kPointerSize));

  // Setup the elements pointer in the allocated arguments object and
  // initialize the header in the elements fixed array.
  __ lea(rdi, Operand(rax, GetArgumentsObjectSize()));
  __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi);
  __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
  __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
  __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
  __ SmiToInteger32(rcx, rcx);  // Untag length for the loop below.

  // Copy the fixed array slots.
  Label loop;
  __ bind(&loop);
  __ movq(kScratchRegister, Operand(rdx, -1 * kPointerSize));  // Skip receiver.
  __ movq(FieldOperand(rdi, FixedArray::kHeaderSize), kScratchRegister);
  __ addq(rdi, Immediate(kPointerSize));
  __ subq(rdx, Immediate(kPointerSize));
  __ decl(rcx);
  __ j(not_zero, &loop);

  // Return and remove the on-stack parameters.
  __ bind(&done);
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
}


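// Editorial sketch (hypothetical helper): the byte count handed to
// AllocateInNewSpace above. GetArgumentsObjectSize() covers the JSObject
// part; the elements FixedArray is omitted entirely when there are no
// arguments, which is why the leal is skipped for a zero count.
static inline int ExampleArgumentsAllocationSize(int argc,
                                                 int arguments_object_size) {
  int elements_size =
      argc == 0 ? 0 : FixedArray::kHeaderSize + argc * kPointerSize;
  return arguments_object_size + elements_size;
}

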
void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Just jump directly to the runtime if native RegExp is not selected at
  // compile time, or if the RegExp entry in generated code has been turned
  // off by a runtime flag.
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
#else  // V8_INTERPRETED_REGEXP
  if (!FLAG_regexp_entry_native) {
    __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
    return;
  }

  // Stack frame on entry.
  //  rsp[0]: return address
  //  rsp[8]: last_match_info (expected JSArray)
  //  rsp[16]: previous index
  //  rsp[24]: subject string
  //  rsp[32]: JSRegExp object

  static const int kLastMatchInfoOffset = 1 * kPointerSize;
  static const int kPreviousIndexOffset = 2 * kPointerSize;
  static const int kSubjectOffset = 3 * kPointerSize;
  static const int kJSRegExpOffset = 4 * kPointerSize;

  Label runtime;
  // Ensure that a RegExp stack is allocated.
  Isolate* isolate = masm->isolate();
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(isolate);
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(isolate);
  __ Load(kScratchRegister, address_of_regexp_stack_memory_size);
  __ testq(kScratchRegister, kScratchRegister);
  __ j(zero, &runtime);

  // Check that the first argument is a JSRegExp object.
  __ movq(rax, Operand(rsp, kJSRegExpOffset));
  __ JumpIfSmi(rax, &runtime);
  __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister);
  __ j(not_equal, &runtime);
  // Check that the RegExp has been compiled (data contains a fixed array).
  __ movq(rax, FieldOperand(rax, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    Condition is_smi = masm->CheckSmi(rax);
    __ Check(NegateCondition(is_smi),
             "Unexpected type for RegExp data, FixedArray expected");
    __ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister);
    __ Check(equal, "Unexpected type for RegExp data, FixedArray expected");
  }

  // rax: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
  __ SmiToInteger32(rbx, FieldOperand(rax, JSRegExp::kDataTagOffset));
  __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP));
  __ j(not_equal, &runtime);

  // rax: RegExp data (FixedArray)
  // Check that the number of captures fits in the static offsets vector
  // buffer.
  __ SmiToInteger32(rdx,
                    FieldOperand(rax, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2.
  __ leal(rdx, Operand(rdx, rdx, times_1, 2));
  // Check that the static offsets vector buffer is large enough.
  __ cmpl(rdx, Immediate(OffsetsVector::kStaticOffsetsVectorSize));
  __ j(above, &runtime);

  // rax: RegExp data (FixedArray)
  // rdx: Number of capture registers
  // Check that the second argument is a string.
  __ movq(rdi, Operand(rsp, kSubjectOffset));
  __ JumpIfSmi(rdi, &runtime);
  Condition is_string = masm->IsObjectStringType(rdi, rbx, rbx);
  __ j(NegateCondition(is_string), &runtime);

  // rdi: Subject string.
  // rax: RegExp data (FixedArray).
  // rdx: Number of capture registers.
  // Check that the third argument is a positive smi less than the string
  // length. A negative value will be greater (unsigned comparison).
  __ movq(rbx, Operand(rsp, kPreviousIndexOffset));
  __ JumpIfNotSmi(rbx, &runtime);
  __ SmiCompare(rbx, FieldOperand(rdi, String::kLengthOffset));
  __ j(above_equal, &runtime);

  // rax: RegExp data (FixedArray)
  // rdx: Number of capture registers
  // Check that the fourth object is a JSArray object.
  __ movq(rdi, Operand(rsp, kLastMatchInfoOffset));
  __ JumpIfSmi(rdi, &runtime);
  __ CmpObjectType(rdi, JS_ARRAY_TYPE, kScratchRegister);
  __ j(not_equal, &runtime);
  // Check that the JSArray is in fast case.
  __ movq(rbx, FieldOperand(rdi, JSArray::kElementsOffset));
  __ movq(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
  __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                 Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &runtime);
  // Check that the last match info has space for the capture registers and the
  // additional information. Ensure no overflow in add.
  STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset);
  __ SmiToInteger32(rdi, FieldOperand(rbx, FixedArray::kLengthOffset));
  __ addl(rdx, Immediate(RegExpImpl::kLastMatchOverhead));
  __ cmpl(rdx, rdi);
  __ j(greater, &runtime);

  // rax: RegExp data (FixedArray)
  // Check the representation and encoding of the subject string.
  Label seq_ascii_string, seq_two_byte_string, check_code;
  __ movq(rdi, Operand(rsp, kSubjectOffset));
  __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
  // First check for flat two byte string.
  __ andb(rbx, Immediate(
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask));
  STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string, Label::kNear);
  // Any other flat string must be a flat ascii string.
  __ testb(rbx, Immediate(kIsNotStringMask | kStringRepresentationMask));
  __ j(zero, &seq_ascii_string, Label::kNear);

  // Check for flat cons string.
  // A flat cons string is a cons string where the second part is the empty
  // string. In that case the subject string is just the first part of the cons
  // string. Also in this case the first part of the cons string is known to be
  // a sequential string or an external string.
  STATIC_ASSERT(kExternalStringTag != 0);
  STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0);
  __ testb(rbx, Immediate(kIsNotStringMask | kExternalStringTag));
  __ j(not_zero, &runtime);
  // String is a cons string.
  __ CompareRoot(FieldOperand(rdi, ConsString::kSecondOffset),
                 Heap::kEmptyStringRootIndex);
  __ j(not_equal, &runtime);
  __ movq(rdi, FieldOperand(rdi, ConsString::kFirstOffset));
  __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  // String is a cons string with empty second part.
  // rdi: first part of cons string.
  // rbx: map of first part of cons string.
  // Is first part a flat two byte string?
  __ testb(FieldOperand(rbx, Map::kInstanceTypeOffset),
           Immediate(kStringRepresentationMask | kStringEncodingMask));
  STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string, Label::kNear);
  // Any other flat string must be ascii.
  __ testb(FieldOperand(rbx, Map::kInstanceTypeOffset),
           Immediate(kStringRepresentationMask));
  __ j(not_zero, &runtime);

  __ bind(&seq_ascii_string);
  // rdi: subject string (sequential ascii)
  // rax: RegExp data (FixedArray)
  __ movq(r11, FieldOperand(rax, JSRegExp::kDataAsciiCodeOffset));
  __ Set(rcx, 1);  // Type is ascii.
  __ jmp(&check_code, Label::kNear);

  __ bind(&seq_two_byte_string);
  // rdi: subject string (flat two-byte)
  // rax: RegExp data (FixedArray)
  __ movq(r11, FieldOperand(rax, JSRegExp::kDataUC16CodeOffset));
  __ Set(rcx, 0);  // Type is two byte.

  __ bind(&check_code);
  // Check that the irregexp code has been generated for the actual string
  // encoding. If it has, the field contains a code object; otherwise it
  // contains a smi (code flushing support).
  __ JumpIfSmi(r11, &runtime);

  // rdi: subject string
  // rcx: encoding of subject string (1 if ascii, 0 if two_byte);
  // r11: code
  // Load used arguments before starting to push arguments for call to native
  // RegExp code to avoid handling changing stack height.
  __ SmiToInteger64(rbx, Operand(rsp, kPreviousIndexOffset));

  // rdi: subject string
  // rbx: previous index
  // rcx: encoding of subject string (1 if ascii 0 if two_byte);
  // r11: code
  // All checks done. Now push arguments for native regexp code.
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->regexp_entry_native(), 1);

  // Isolates: note we add an additional parameter here (isolate pointer).
  static const int kRegExpExecuteArguments = 8;
  int argument_slots_on_stack =
      masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
  __ EnterApiExitFrame(argument_slots_on_stack);

  // Argument 8: Pass current isolate address.
  // __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize),
  //     Immediate(ExternalReference::isolate_address()));
  __ LoadAddress(kScratchRegister, ExternalReference::isolate_address());
  __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize),
          kScratchRegister);

  // Argument 7: Indicate that this is a direct call from JavaScript.
  __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kPointerSize),
          Immediate(1));

  // Argument 6: Start (high end) of backtracking stack memory area.
  __ movq(kScratchRegister, address_of_regexp_stack_memory_address);
  __ movq(r9, Operand(kScratchRegister, 0));
  __ movq(kScratchRegister, address_of_regexp_stack_memory_size);
  __ addq(r9, Operand(kScratchRegister, 0));
  // Argument 6 passed in r9 on Linux and on the stack on Windows.
#ifdef _WIN64
  __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kPointerSize), r9);
#endif

  // Argument 5: static offsets vector buffer.
  __ LoadAddress(r8,
                 ExternalReference::address_of_static_offsets_vector(isolate));
  // Argument 5 passed in r8 on Linux and on the stack on Windows.
#ifdef _WIN64
  __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kPointerSize), r8);
#endif

  // First four arguments are passed in registers on both Linux and Windows.
#ifdef _WIN64
  Register arg4 = r9;
  Register arg3 = r8;
  Register arg2 = rdx;
  Register arg1 = rcx;
#else
  Register arg4 = rcx;
  Register arg3 = rdx;
  Register arg2 = rsi;
  Register arg1 = rdi;
#endif

  // Keep track of aliasing between argX defined above and the registers used.
  // rdi: subject string
  // rbx: previous index
  // rcx: encoding of subject string (1 if ascii 0 if two_byte);
  // r11: code

  // Argument 4: End of string data
  // Argument 3: Start of string data
  Label setup_two_byte, setup_rest;
  __ testb(rcx, rcx);  // Last use of rcx as encoding of subject string.
  __ j(zero, &setup_two_byte, Label::kNear);
  __ SmiToInteger32(rcx, FieldOperand(rdi, String::kLengthOffset));
  __ lea(arg4, FieldOperand(rdi, rcx, times_1, SeqAsciiString::kHeaderSize));
  __ lea(arg3, FieldOperand(rdi, rbx, times_1, SeqAsciiString::kHeaderSize));
  __ jmp(&setup_rest, Label::kNear);
  __ bind(&setup_two_byte);
  __ SmiToInteger32(rcx, FieldOperand(rdi, String::kLengthOffset));
  __ lea(arg4, FieldOperand(rdi, rcx, times_2, SeqTwoByteString::kHeaderSize));
  __ lea(arg3, FieldOperand(rdi, rbx, times_2, SeqTwoByteString::kHeaderSize));

  __ bind(&setup_rest);
  // Argument 2: Previous index.
  __ movq(arg2, rbx);

  // Argument 1: Subject string.
#ifdef _WIN64
  __ movq(arg1, rdi);
#else
  // Already there in AMD64 calling convention.
  ASSERT(arg1.is(rdi));
#endif

  // Locate the code entry and call it.
  __ addq(r11, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(r11);

  __ LeaveApiExitFrame();

  // Check the result.
  Label success;
  Label exception;
  __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::SUCCESS));
  __ j(equal, &success, Label::kNear);
  __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION));
  __ j(equal, &exception);
  __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE));
  // If none of the above, it can only be retry.
  // Handle that in the runtime system.
  __ j(not_equal, &runtime);

  // For failure return null.
  __ LoadRoot(rax, Heap::kNullValueRootIndex);
  __ ret(4 * kPointerSize);

  // Load RegExp data.
  __ bind(&success);
  __ movq(rax, Operand(rsp, kJSRegExpOffset));
  __ movq(rcx, FieldOperand(rax, JSRegExp::kDataOffset));
  __ SmiToInteger32(rax,
                    FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2.
  __ leal(rdx, Operand(rax, rax, times_1, 2));

  // rdx: Number of capture registers
  // Load last_match_info which is still known to be a fast case JSArray.
  __ movq(rax, Operand(rsp, kLastMatchInfoOffset));
  __ movq(rbx, FieldOperand(rax, JSArray::kElementsOffset));

  // rbx: last_match_info backing store (FixedArray)
  // rdx: number of capture registers
  // Store the capture count.
  __ Integer32ToSmi(kScratchRegister, rdx);
  __ movq(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset),
          kScratchRegister);
  // Store last subject and last input.
  __ movq(rax, Operand(rsp, kSubjectOffset));
  __ movq(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax);
  __ movq(rcx, rbx);
  __ RecordWrite(rcx, RegExpImpl::kLastSubjectOffset, rax, rdi);
  __ movq(rax, Operand(rsp, kSubjectOffset));
  __ movq(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax);
  __ movq(rcx, rbx);
  __ RecordWrite(rcx, RegExpImpl::kLastInputOffset, rax, rdi);

  // Get the static offsets vector filled by the native regexp code.
  __ LoadAddress(rcx,
                 ExternalReference::address_of_static_offsets_vector(isolate));

  // rbx: last_match_info backing store (FixedArray)
  // rcx: offsets vector
  // rdx: number of capture registers
  Label next_capture, done;
  // Capture register counter starts from number of capture registers and
  // counts down until wrapping after zero.
  __ bind(&next_capture);
  __ subq(rdx, Immediate(1));
  __ j(negative, &done, Label::kNear);
  // Read the value from the static offsets vector buffer and make it a smi.
  __ movl(rdi, Operand(rcx, rdx, times_int_size, 0));
  __ Integer32ToSmi(rdi, rdi);
  // Store the smi value in the last match info.
  __ movq(FieldOperand(rbx,
                       rdx,
                       times_pointer_size,
                       RegExpImpl::kFirstCaptureOffset),
          rdi);
  __ jmp(&next_capture);
  __ bind(&done);

  // Return last match info.
  __ movq(rax, Operand(rsp, kLastMatchInfoOffset));
  __ ret(4 * kPointerSize);

  __ bind(&exception);
  // Result must now be exception. If there is no pending exception already, a
  // stack overflow (on the backtrack stack) was detected in RegExp code, but
  // the exception has not been created yet. Handle that in the runtime system.
  // TODO(592): Rerunning the RegExp to get the stack overflow exception.
  ExternalReference pending_exception_address(
      Isolate::k_pending_exception_address, isolate);
  Operand pending_exception_operand =
      masm->ExternalOperand(pending_exception_address, rbx);
  __ movq(rax, pending_exception_operand);
  __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
  __ cmpq(rax, rdx);
  __ j(equal, &runtime);
  __ movq(pending_exception_operand, rdx);

  __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
  Label termination_exception;
  __ j(equal, &termination_exception, Label::kNear);
  __ Throw(rax);

  __ bind(&termination_exception);
  __ ThrowUncatchable(TERMINATION, rax);

  // Do the runtime call to execute the regexp.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
#endif  // V8_INTERPRETED_REGEXP
}


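// Editorial sketch: the capture-register arithmetic used twice above. Every
// capture needs a start and an end offset, and index 0 is the whole match,
// hence (number_of_captures + 1) * 2, computed with a single lea.
static inline int ExampleNumCaptureRegisters(int number_of_captures) {
  // leal(rdx, Operand(rdx, rdx, times_1, 2)) evaluates rdx + rdx + 2.
  return 2 * (number_of_captures + 1);
}

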
void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
  const int kMaxInlineLength = 100;
  Label slowcase;
  Label done;
  __ movq(r8, Operand(rsp, kPointerSize * 3));
  __ JumpIfNotSmi(r8, &slowcase);
  __ SmiToInteger32(rbx, r8);
  __ cmpl(rbx, Immediate(kMaxInlineLength));
  __ j(above, &slowcase);
  // Smi-tagging is equivalent to multiplying by 2.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  // Allocate RegExpResult followed by FixedArray with size in rbx.
  // JSArray:   [Map][empty properties][Elements][Length-smi][index][input]
  // Elements:  [Map][Length][..elements..]
  __ AllocateInNewSpace(JSRegExpResult::kSize + FixedArray::kHeaderSize,
                        times_pointer_size,
                        rbx,  // In: Number of elements.
                        rax,  // Out: Start of allocation (tagged).
                        rcx,  // Out: End of allocation.
                        rdx,  // Scratch register
                        &slowcase,
                        TAG_OBJECT);
  // rax: Start of allocated area, object-tagged.
  // rbx: Number of array elements as int32.
  // r8: Number of array elements as smi.

  // Set JSArray map to global.regexp_result_map().
  __ movq(rdx, ContextOperand(rsi, Context::GLOBAL_INDEX));
  __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalContextOffset));
  __ movq(rdx, ContextOperand(rdx, Context::REGEXP_RESULT_MAP_INDEX));
  __ movq(FieldOperand(rax, HeapObject::kMapOffset), rdx);

  // Set empty properties FixedArray.
  __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex);
  __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister);

  // Set elements to point to FixedArray allocated right after the JSArray.
  __ lea(rcx, Operand(rax, JSRegExpResult::kSize));
  __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx);

  // Set input, index and length fields from arguments.
  __ movq(r8, Operand(rsp, kPointerSize * 1));
  __ movq(FieldOperand(rax, JSRegExpResult::kInputOffset), r8);
  __ movq(r8, Operand(rsp, kPointerSize * 2));
  __ movq(FieldOperand(rax, JSRegExpResult::kIndexOffset), r8);
  __ movq(r8, Operand(rsp, kPointerSize * 3));
  __ movq(FieldOperand(rax, JSArray::kLengthOffset), r8);

  // Fill out the elements FixedArray.
  // rax: JSArray.
  // rcx: FixedArray.
  // rbx: Number of elements in array as int32.

  // Set map.
  __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
  __ movq(FieldOperand(rcx, HeapObject::kMapOffset), kScratchRegister);
  // Set length.
  __ Integer32ToSmi(rdx, rbx);
  __ movq(FieldOperand(rcx, FixedArray::kLengthOffset), rdx);
  // Fill contents of fixed-array with the-hole.
  __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
  __ lea(rcx, FieldOperand(rcx, FixedArray::kHeaderSize));
  // Fill fixed array elements with hole.
  // rax: JSArray.
  // rbx: Number of elements in array that remains to be filled, as int32.
  // rcx: Start of elements in FixedArray.
  // rdx: the hole.
  Label loop;
  __ testl(rbx, rbx);
  __ bind(&loop);
  __ j(less_equal, &done);  // Jump if rbx is negative or zero.
  __ subl(rbx, Immediate(1));
  __ movq(Operand(rcx, rbx, times_pointer_size, 0), rdx);
  __ jmp(&loop);

  __ bind(&done);
  __ ret(3 * kPointerSize);

  __ bind(&slowcase);
  __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
}


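// Editorial sketch (hypothetical helper): the single-allocation layout built
// above. The JSRegExpResult header and its elements FixedArray are carved
// out of one contiguous new-space block, so the elements pointer is a fixed
// offset from the start of the allocation.
static inline int ExampleRegExpResultAllocationSize(int num_elements) {
  return JSRegExpResult::kSize + FixedArray::kHeaderSize +
         num_elements * kPointerSize;
}

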
void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
                                                         Register object,
                                                         Register result,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         bool object_is_smi,
                                                         Label* not_found) {
  // Use of registers. Register result is used as a temporary.
  Register number_string_cache = result;
  Register mask = scratch1;
  Register scratch = scratch2;

  // Load the number string cache.
  __ LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);

  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  __ SmiToInteger32(
      mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
  __ shrl(mask, Immediate(1));
  __ subq(mask, Immediate(1));  // Make mask.

  // Calculate the entry in the number string cache. The hash value in the
  // number string cache for smis is just the smi value, and the hash for
  // doubles is the xor of the upper and lower words. See
  // Heap::GetNumberStringCache.
  Label is_smi;
  Label load_result_from_cache;
  Factory* factory = masm->isolate()->factory();
  if (!object_is_smi) {
    __ JumpIfSmi(object, &is_smi);
    __ CheckMap(object,
                factory->heap_number_map(),
                not_found,
                DONT_DO_SMI_CHECK);

    STATIC_ASSERT(8 == kDoubleSize);
    __ movl(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
    __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset));
    GenerateConvertHashCodeToIndex(masm, scratch, mask);

    Register index = scratch;
    Register probe = mask;
    __ movq(probe,
            FieldOperand(number_string_cache,
                         index,
                         times_1,
                         FixedArray::kHeaderSize));
    __ JumpIfSmi(probe, not_found);
    __ movsd(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
    __ movsd(xmm1, FieldOperand(probe, HeapNumber::kValueOffset));
    __ ucomisd(xmm0, xmm1);
    __ j(parity_even, not_found);  // Bail out if NaN is involved.
    __ j(not_equal, not_found);  // The cache did not contain this value.
    __ jmp(&load_result_from_cache);
  }

  __ bind(&is_smi);
  __ SmiToInteger32(scratch, object);
  GenerateConvertHashCodeToIndex(masm, scratch, mask);

  Register index = scratch;
  // Check if the entry is the smi we are looking for.
  __ cmpq(object,
          FieldOperand(number_string_cache,
                       index,
                       times_1,
                       FixedArray::kHeaderSize));
  __ j(not_equal, not_found);

  // Get the result from the cache.
  __ bind(&load_result_from_cache);
  __ movq(result,
          FieldOperand(number_string_cache,
                       index,
                       times_1,
                       FixedArray::kHeaderSize + kPointerSize));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->number_to_string_native(), 1);
}


2617void NumberToStringStub::GenerateConvertHashCodeToIndex(MacroAssembler* masm,
2618 Register hash,
2619 Register mask) {
2620 __ and_(hash, mask);
2621 // Each entry in the string cache consists of two pointer-sized fields,
2622 // but the times_twice_pointer_size (multiplication by 16) scale factor
2623 // is not supported by the addressing modes on the x64 platform,
2624 // so we have to premultiply the entry index before the lookup.
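 // For illustration: a masked entry index of 5 becomes 5 << 4 = 80, the
 // byte offset of entry 5's number slot relative to the first entry; the
 // cached string sits one pointer further, at offset 88.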
2625 __ shl(hash, Immediate(kPointerSizeLog2 + 1));
2626}
2627
2628
2629void NumberToStringStub::Generate(MacroAssembler* masm) {
2630 Label runtime;
2631
2632 __ movq(rbx, Operand(rsp, kPointerSize));
2633
2634 // Generate code to lookup number in the number string cache.
2635 GenerateLookupNumberStringCache(masm, rbx, rax, r8, r9, false, &runtime);
2636 __ ret(1 * kPointerSize);
2637
2638 __ bind(&runtime);
2639 // Handle number to string in the runtime system if not found in the cache.
2640 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);
2641}
2642
2643
2644static int NegativeComparisonResult(Condition cc) {
2645 ASSERT(cc != equal);
2646 ASSERT((cc == less) || (cc == less_equal)
2647 || (cc == greater) || (cc == greater_equal));
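 // For example, evaluating "a < b" when one operand is undefined reaches
 // this with cc == less; returning GREATER makes the comparison yield
 // false, as required for comparisons involving undefined.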
2648 return (cc == greater || cc == greater_equal) ? LESS : GREATER;
2649}
2650
2651
2652void CompareStub::Generate(MacroAssembler* masm) {
2653 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
2654
2655 Label check_unequal_objects, done;
Ben Murdoch257744e2011-11-30 15:57:28 +00002656 Factory* factory = masm->isolate()->factory();
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002657
2658 // Compare two smis if required.
2659 if (include_smi_compare_) {
2660 Label non_smi, smi_done;
2661 __ JumpIfNotBothSmi(rax, rdx, &non_smi);
2662 __ subq(rdx, rax);
2663 __ j(no_overflow, &smi_done);
Ben Murdochf87a2032010-10-22 12:50:53 +01002664 __ not_(rdx); // Correct sign in case of overflow. rdx cannot be 0 here.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002665 __ bind(&smi_done);
2666 __ movq(rax, rdx);
2667 __ ret(0);
2668 __ bind(&non_smi);
2669 } else if (FLAG_debug_code) {
2670 Label ok;
2671 __ JumpIfNotSmi(rdx, &ok);
2672 __ JumpIfNotSmi(rax, &ok);
2673 __ Abort("CompareStub: smi operands");
2674 __ bind(&ok);
2675 }
2676
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002677 // The compare stub returns a positive, negative, or zero 64-bit integer
2678 // value in rax, corresponding to the result of comparing the two inputs.
2679 // NOTICE! This code is only reached after a smi-fast-case check, so
2680 // it is certain that at least one operand isn't a smi.
2681
2682 // Two identical objects are equal unless they are both NaN or undefined.
2683 {
Ben Murdoch257744e2011-11-30 15:57:28 +00002684 Label not_identical;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002685 __ cmpq(rax, rdx);
Ben Murdoch257744e2011-11-30 15:57:28 +00002686 __ j(not_equal, &not_identical, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002687
2688 if (cc_ != equal) {
2689 // Check for undefined. undefined OP undefined is false even though
2690 // undefined == undefined.
Ben Murdoch257744e2011-11-30 15:57:28 +00002691 Label check_for_nan;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002692 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
Ben Murdoch257744e2011-11-30 15:57:28 +00002693 __ j(not_equal, &check_for_nan, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002694 __ Set(rax, NegativeComparisonResult(cc_));
2695 __ ret(0);
2696 __ bind(&check_for_nan);
2697 }
2698
Steve Block44f0eee2011-05-26 01:26:41 +01002699 // Test for NaN. Sadly, we can't just compare to FACTORY->nan_value(),
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002700 // so we do the second best thing - test it ourselves.
2701 // Note: if cc_ != equal, never_nan_nan_ is not used.
2702 // We cannot set rax to EQUAL until just before return because
2703 // rax must be unchanged on jump to not_identical.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002704 if (never_nan_nan_ && (cc_ == equal)) {
2705 __ Set(rax, EQUAL);
2706 __ ret(0);
2707 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00002708 Label heap_number;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002709 // If it's not a heap number, then return equal for (in)equality operator.
2710 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
Ben Murdoch257744e2011-11-30 15:57:28 +00002711 factory->heap_number_map());
2712 __ j(equal, &heap_number, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002713 if (cc_ != equal) {
2714 // Call runtime on identical JSObjects. Otherwise return equal.
2715 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
Ben Murdoch257744e2011-11-30 15:57:28 +00002716 __ j(above_equal, &not_identical, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002717 }
2718 __ Set(rax, EQUAL);
2719 __ ret(0);
2720
2721 __ bind(&heap_number);
2722 // It is a heap number, so return equal if it's not NaN.
2723 // For NaN, return 1 for every condition except greater and
2724 // greater-equal. Return -1 for them, so the comparison yields
2725 // false for all conditions except not-equal.
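 // For example, comparing NaN with itself: ucomisd sets the parity flag,
 // so the setcc below stores 1 (GREATER) in rax, and the neg for greater
 // and greater-equal turns it into -1 (LESS), so both orderings are false.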
2726 __ Set(rax, EQUAL);
2727 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
2728 __ ucomisd(xmm0, xmm0);
2729 __ setcc(parity_even, rax);
2730 // rax is 0 for equal non-NaN heapnumbers, 1 for NaNs.
2731 if (cc_ == greater_equal || cc_ == greater) {
2732 __ neg(rax);
2733 }
2734 __ ret(0);
2735 }
2736
2737 __ bind(&not_identical);
2738 }
2739
2740 if (cc_ == equal) { // Both strict and non-strict.
2741 Label slow; // Fallthrough label.
2742
2743 // If we're doing a strict equality comparison, we don't have to do
2744 // type conversion, so we generate code to do fast comparison for objects
2745 // and oddballs. Non-smi numbers and strings still go through the usual
2746 // slow-case code.
2747 if (strict_) {
2748 // If either is a Smi (we know that not both are), then they can only
2749 // be equal if the other is a HeapNumber. If so, use the slow case.
2750 {
2751 Label not_smis;
2752 __ SelectNonSmi(rbx, rax, rdx, &not_smis);
2753
2754 // Check if the non-smi operand is a heap number.
2755 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
Ben Murdoch257744e2011-11-30 15:57:28 +00002756 factory->heap_number_map());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002757 // If heap number, handle it in the slow case.
2758 __ j(equal, &slow);
2759 // Return non-equal. ebx (the lower half of rbx) is not zero.
2760 __ movq(rax, rbx);
2761 __ ret(0);
2762
2763 __ bind(&not_smis);
2764 }
2765
2766 // If either operand is a JSObject or an oddball value, then they are not
2767 // equal since their pointers are different
2768 // There is no test for undetectability in strict equality.
2769
2770 // If the first object is a JS object, we have done pointer comparison.
2771 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
Ben Murdoch257744e2011-11-30 15:57:28 +00002772 Label first_non_object;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002773 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
Ben Murdoch257744e2011-11-30 15:57:28 +00002774 __ j(below, &first_non_object, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002775 // Return non-zero (eax, the lower half of rax, is not zero).
2776 Label return_not_equal;
2777 STATIC_ASSERT(kHeapObjectTag != 0);
2778 __ bind(&return_not_equal);
2779 __ ret(0);
2780
2781 __ bind(&first_non_object);
2782 // Check for oddballs: true, false, null, undefined.
2783 __ CmpInstanceType(rcx, ODDBALL_TYPE);
2784 __ j(equal, &return_not_equal);
2785
2786 __ CmpObjectType(rdx, FIRST_JS_OBJECT_TYPE, rcx);
2787 __ j(above_equal, &return_not_equal);
2788
2789 // Check for oddballs: true, false, null, undefined.
2790 __ CmpInstanceType(rcx, ODDBALL_TYPE);
2791 __ j(equal, &return_not_equal);
2792
2793 // Fall through to the general case.
2794 }
2795 __ bind(&slow);
2796 }
2797
2798 // Generate the number comparison code.
2799 if (include_number_compare_) {
2800 Label non_number_comparison;
Ben Murdoch257744e2011-11-30 15:57:28 +00002801 Label unordered;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002802 FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison);
2803 __ xorl(rax, rax);
2804 __ xorl(rcx, rcx);
2805 __ ucomisd(xmm0, xmm1);
2806
2807 // Don't base result on EFLAGS when a NaN is involved.
Ben Murdoch257744e2011-11-30 15:57:28 +00002808 __ j(parity_even, &unordered, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002809 // Return a result of -1, 0, or 1, based on EFLAGS.
2810 __ setcc(above, rax);
2811 __ setcc(below, rcx);
2812 __ subq(rax, rcx);
2813 __ ret(0);
2814
2815 // If one of the numbers was NaN, then the result is always false.
2816 // The cc is never not-equal.
2817 __ bind(&unordered);
2818 ASSERT(cc_ != not_equal);
2819 if (cc_ == less || cc_ == less_equal) {
2820 __ Set(rax, 1);
2821 } else {
2822 __ Set(rax, -1);
2823 }
2824 __ ret(0);
2825
2826 // The number comparison code did not provide a valid result.
2827 __ bind(&non_number_comparison);
2828 }
2829
2830 // Fast negative check for symbol-to-symbol equality.
2831 Label check_for_strings;
2832 if (cc_ == equal) {
2833 BranchIfNonSymbol(masm, &check_for_strings, rax, kScratchRegister);
2834 BranchIfNonSymbol(masm, &check_for_strings, rdx, kScratchRegister);
2835
2836 // We've already checked for object identity, so if both operands
2837 // are symbols they aren't equal. Register eax (not rax) already holds a
2838 // non-zero value, which indicates not equal, so just return.
2839 __ ret(0);
2840 }
2841
2842 __ bind(&check_for_strings);
2843
2844 __ JumpIfNotBothSequentialAsciiStrings(
2845 rdx, rax, rcx, rbx, &check_unequal_objects);
2846
2847 // Inline comparison of ascii strings.
Ben Murdoch257744e2011-11-30 15:57:28 +00002848 if (cc_ == equal) {
2849 StringCompareStub::GenerateFlatAsciiStringEquals(masm,
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002850 rdx,
2851 rax,
2852 rcx,
Ben Murdoch257744e2011-11-30 15:57:28 +00002853 rbx);
2854 } else {
2855 StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
2856 rdx,
2857 rax,
2858 rcx,
2859 rbx,
2860 rdi,
2861 r8);
2862 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002863
2864#ifdef DEBUG
2865 __ Abort("Unexpected fall-through from string comparison");
2866#endif
2867
2868 __ bind(&check_unequal_objects);
2869 if (cc_ == equal && !strict_) {
2870 // Not strict equality. Objects are unequal if
2871 // they are both JSObjects and not undetectable,
2872 // and their pointers are different.
Ben Murdoch257744e2011-11-30 15:57:28 +00002873 Label not_both_objects, return_unequal;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002874 // At most one is a smi, so we can test for smi by adding the two.
2875 // A smi plus a heap object has the low bit set, a heap object plus
2876 // a heap object has the low bit clear.
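 // For example, with kSmiTag == 0 a smi pointer ends in bit 0 and a heap
 // object pointer ends in bit 1, so smi + heap object ends in 1 while
 // heap object + heap object ends in 0 (1 + 1 carries out of the bit).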
2877 STATIC_ASSERT(kSmiTag == 0);
2878 STATIC_ASSERT(kSmiTagMask == 1);
2879 __ lea(rcx, Operand(rax, rdx, times_1, 0));
2880 __ testb(rcx, Immediate(kSmiTagMask));
Ben Murdoch257744e2011-11-30 15:57:28 +00002881 __ j(not_zero, &not_both_objects, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002882 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rbx);
Ben Murdoch257744e2011-11-30 15:57:28 +00002883 __ j(below, &not_both_objects, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002884 __ CmpObjectType(rdx, FIRST_JS_OBJECT_TYPE, rcx);
Ben Murdoch257744e2011-11-30 15:57:28 +00002885 __ j(below, &not_both_objects, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002886 __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2887 Immediate(1 << Map::kIsUndetectable));
Ben Murdoch257744e2011-11-30 15:57:28 +00002888 __ j(zero, &return_unequal, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002889 __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
2890 Immediate(1 << Map::kIsUndetectable));
Ben Murdoch257744e2011-11-30 15:57:28 +00002891 __ j(zero, &return_unequal, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002892 // The objects are both undetectable, so they both compare as the value
2893 // undefined, and are equal.
2894 __ Set(rax, EQUAL);
2895 __ bind(&return_unequal);
Steve Block1e0659c2011-05-24 12:43:12 +01002896 // Return non-equal by returning the non-zero object pointer in rax,
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002897 // or return equal if we fell through to here.
2898 __ ret(0);
2899 __ bind(&not_both_objects);
2900 }
2901
2902 // Push arguments below the return address to prepare jump to builtin.
2903 __ pop(rcx);
2904 __ push(rdx);
2905 __ push(rax);
2906
2907 // Figure out which native to call and setup the arguments.
2908 Builtins::JavaScript builtin;
2909 if (cc_ == equal) {
2910 builtin = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
2911 } else {
2912 builtin = Builtins::COMPARE;
2913 __ Push(Smi::FromInt(NegativeComparisonResult(cc_)));
2914 }
2915
2916 // Restore return address on the stack.
2917 __ push(rcx);
2918
2919 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
2920 // tagged as a small integer.
2921 __ InvokeBuiltin(builtin, JUMP_FUNCTION);
2922}
2923
2924
2925void CompareStub::BranchIfNonSymbol(MacroAssembler* masm,
2926 Label* label,
2927 Register object,
2928 Register scratch) {
2929 __ JumpIfSmi(object, label);
2930 __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset));
2931 __ movzxbq(scratch,
2932 FieldOperand(scratch, Map::kInstanceTypeOffset));
2933 // Ensure that no non-strings have the symbol bit set.
2934 STATIC_ASSERT(LAST_TYPE < kNotStringTag + kIsSymbolMask);
2935 STATIC_ASSERT(kSymbolTag != 0);
2936 __ testb(scratch, Immediate(kIsSymbolMask));
2937 __ j(zero, label);
2938}
2939
2940
2941void StackCheckStub::Generate(MacroAssembler* masm) {
Ben Murdochf87a2032010-10-22 12:50:53 +01002942 __ TailCallRuntime(Runtime::kStackGuard, 0, 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002943}
2944
2945
2946void CallFunctionStub::Generate(MacroAssembler* masm) {
2947 Label slow;
2948
Ben Murdoch257744e2011-11-30 15:57:28 +00002949 // The receiver might implicitly be the global object. This is
2950 // indicated by passing the hole as the receiver to the call
2951 // function stub.
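 // For example, a plain call "f()" written without an explicit receiver
 // reaches this stub with the hole on the stack; the code below patches
 // it to the global receiver object before the invocation.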
2952 if (ReceiverMightBeImplicit()) {
2953 Label call;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002954 // Get the receiver from the stack.
2955 // +1 ~ return address
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002956 __ movq(rax, Operand(rsp, (argc_ + 1) * kPointerSize));
Ben Murdoch257744e2011-11-30 15:57:28 +00002957 // Call as function is indicated with the hole.
2958 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
2959 __ j(not_equal, &call, Label::kNear);
2960 // Patch the receiver on the stack with the global receiver object.
2961 __ movq(rbx, GlobalObjectOperand());
2962 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
2963 __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rbx);
2964 __ bind(&call);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002965 }
2966
2967 // Get the function to call from the stack.
2968 // +2 ~ receiver, return address
2969 __ movq(rdi, Operand(rsp, (argc_ + 2) * kPointerSize));
2970
2971 // Check that the function really is a JavaScript function.
2972 __ JumpIfSmi(rdi, &slow);
2973 // Goto slow case if we do not have a function.
2974 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
2975 __ j(not_equal, &slow);
2976
2977 // Fast-case: Just invoke the function.
2978 ParameterCount actual(argc_);
Ben Murdoch257744e2011-11-30 15:57:28 +00002979
2980 if (ReceiverMightBeImplicit()) {
2981 Label call_as_function;
2982 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
2983 __ j(equal, &call_as_function);
2984 __ InvokeFunction(rdi,
2985 actual,
2986 JUMP_FUNCTION,
2987 NullCallWrapper(),
2988 CALL_AS_METHOD);
2989 __ bind(&call_as_function);
2990 }
2991 __ InvokeFunction(rdi,
2992 actual,
2993 JUMP_FUNCTION,
2994 NullCallWrapper(),
2995 CALL_AS_FUNCTION);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002996
2997 // Slow-case: Non-function called.
2998 __ bind(&slow);
2999 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
3000 // of the original receiver from the call site).
3001 __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rdi);
3002 __ Set(rax, argc_);
3003 __ Set(rbx, 0);
3004 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
Steve Block44f0eee2011-05-26 01:26:41 +01003005 Handle<Code> adaptor =
3006 Isolate::Current()->builtins()->ArgumentsAdaptorTrampoline();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003007 __ Jump(adaptor, RelocInfo::CODE_TARGET);
3008}
3009
3010
Steve Block44f0eee2011-05-26 01:26:41 +01003011bool CEntryStub::NeedsImmovableCode() {
3012 return false;
3013}
3014
3015
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003016void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003017 // Throw the exception in rax.
3018 __ Throw(rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003019}
3020
3021
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003022void CEntryStub::GenerateCore(MacroAssembler* masm,
3023 Label* throw_normal_exception,
3024 Label* throw_termination_exception,
3025 Label* throw_out_of_memory_exception,
3026 bool do_gc,
Steve Block1e0659c2011-05-24 12:43:12 +01003027 bool always_allocate_scope) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003028 // rax: result parameter for PerformGC, if any.
3029 // rbx: pointer to C function (C callee-saved).
3030 // rbp: frame pointer (restored after C call).
3031 // rsp: stack pointer (restored after C call).
3032 // r14: number of arguments including receiver (C callee-saved).
Steve Block44f0eee2011-05-26 01:26:41 +01003033 // r15: pointer to the first argument (C callee-saved).
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003034 // This pointer is reused in LeaveExitFrame(), so it is stored in a
3035 // callee-saved register.
3036
3037 // Simple results returned in rax (both AMD64 and Win64 calling conventions).
3038 // Complex results must be written to the address passed as the first argument.
3039 // AMD64 calling convention: a struct of two pointers in rax+rdx
3040
3041 // Check stack alignment.
3042 if (FLAG_debug_code) {
3043 __ CheckStackAlignment();
3044 }
3045
3046 if (do_gc) {
3047 // Pass failure code returned from last attempt as first argument to
3048 // PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the
3049 // stack is known to be aligned. This function takes one argument, which is
3050 // passed in a register.
3051#ifdef _WIN64
3052 __ movq(rcx, rax);
3053#else // _WIN64
3054 __ movq(rdi, rax);
3055#endif
3056 __ movq(kScratchRegister,
3057 FUNCTION_ADDR(Runtime::PerformGC),
3058 RelocInfo::RUNTIME_ENTRY);
3059 __ call(kScratchRegister);
3060 }
3061
3062 ExternalReference scope_depth =
Steve Block44f0eee2011-05-26 01:26:41 +01003063 ExternalReference::heap_always_allocate_scope_depth(masm->isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003064 if (always_allocate_scope) {
Steve Block44f0eee2011-05-26 01:26:41 +01003065 Operand scope_depth_operand = masm->ExternalOperand(scope_depth);
3066 __ incl(scope_depth_operand);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003067 }
3068
3069 // Call C function.
3070#ifdef _WIN64
3071 // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9
3072 // Store Arguments object on stack, below the 4 WIN64 ABI parameter slots.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003073 __ movq(StackSpaceOperand(0), r14); // argc.
Steve Block44f0eee2011-05-26 01:26:41 +01003074 __ movq(StackSpaceOperand(1), r15); // argv.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003075 if (result_size_ < 2) {
3076 // Pass a pointer to the Arguments object as the first argument.
3077 // Return result in single register (rax).
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003078 __ lea(rcx, StackSpaceOperand(0));
Steve Block44f0eee2011-05-26 01:26:41 +01003079 __ LoadAddress(rdx, ExternalReference::isolate_address());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003080 } else {
3081 ASSERT_EQ(2, result_size_);
3082 // Pass a pointer to the result location as the first argument.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003083 __ lea(rcx, StackSpaceOperand(2));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003084 // Pass a pointer to the Arguments object as the second argument.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003085 __ lea(rdx, StackSpaceOperand(0));
Steve Block44f0eee2011-05-26 01:26:41 +01003086 __ LoadAddress(r8, ExternalReference::isolate_address());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003087 }
3088
3089#else // _WIN64
3090 // GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9.
3091 __ movq(rdi, r14); // argc.
Steve Block44f0eee2011-05-26 01:26:41 +01003092 __ movq(rsi, r15); // argv.
3093 __ movq(rdx, ExternalReference::isolate_address());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003094#endif
3095 __ call(rbx);
3096 // Result is in rax - do not destroy this register!
3097
3098 if (always_allocate_scope) {
Steve Block44f0eee2011-05-26 01:26:41 +01003099 Operand scope_depth_operand = masm->ExternalOperand(scope_depth);
3100 __ decl(scope_depth_operand);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003101 }
3102
3103 // Check for failure result.
3104 Label failure_returned;
3105 STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
3106#ifdef _WIN64
3107 // If return value is on the stack, pop it to registers.
3108 if (result_size_ > 1) {
3109 ASSERT_EQ(2, result_size_);
3110 // Read result values stored on stack. Result is stored
3111 // above the four argument mirror slots and the two
3112 // Arguments object slots.
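 // That is 4 + 2 = 6 slots, so the result words are read from offsets
 // 6 and 7 below.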
3113 __ movq(rax, Operand(rsp, 6 * kPointerSize));
3114 __ movq(rdx, Operand(rsp, 7 * kPointerSize));
3115 }
3116#endif
3117 __ lea(rcx, Operand(rax, 1));
3118 // Lower 2 bits of rcx are 0 iff rax has failure tag.
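 // For example, assuming the failure tag is 0b11, a failure value ends in
 // 0b11, so rax + 1 ends in 0b00 and the masked test below is zero exactly
 // when rax carries a failure.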
3119 __ testl(rcx, Immediate(kFailureTagMask));
3120 __ j(zero, &failure_returned);
3121
3122 // Exit the JavaScript to C++ exit frame.
Steve Block1e0659c2011-05-24 12:43:12 +01003123 __ LeaveExitFrame(save_doubles_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003124 __ ret(0);
3125
3126 // Handling of failure.
3127 __ bind(&failure_returned);
3128
Ben Murdoch257744e2011-11-30 15:57:28 +00003129 Label retry;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003130 // If the returned exception is RETRY_AFTER_GC, continue at the retry label.
3131 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
3132 __ testl(rax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
Ben Murdoch257744e2011-11-30 15:57:28 +00003133 __ j(zero, &retry, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003134
3135 // Special handling of out of memory exceptions.
3136 __ movq(kScratchRegister, Failure::OutOfMemoryException(), RelocInfo::NONE);
3137 __ cmpq(rax, kScratchRegister);
3138 __ j(equal, throw_out_of_memory_exception);
3139
3140 // Retrieve the pending exception and clear the variable.
Steve Block44f0eee2011-05-26 01:26:41 +01003141 ExternalReference pending_exception_address(
3142 Isolate::k_pending_exception_address, masm->isolate());
3143 Operand pending_exception_operand =
3144 masm->ExternalOperand(pending_exception_address);
3145 __ movq(rax, pending_exception_operand);
3146 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
3147 __ movq(pending_exception_operand, rdx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003148
3149 // Special handling of termination exceptions which are uncatchable
3150 // by javascript code.
3151 __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
3152 __ j(equal, throw_termination_exception);
3153
3154 // Handle normal exception.
3155 __ jmp(throw_normal_exception);
3156
3157 // Retry.
3158 __ bind(&retry);
3159}
3160
3161
3162void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
3163 UncatchableExceptionType type) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003164 __ ThrowUncatchable(type, rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003165}
3166
3167
3168void CEntryStub::Generate(MacroAssembler* masm) {
3169 // rax: number of arguments including receiver
3170 // rbx: pointer to C function (C callee-saved)
3171 // rbp: frame pointer of calling JS frame (restored after C call)
3172 // rsp: stack pointer (restored after C call)
3173 // rsi: current context (restored)
3174
3175 // NOTE: Invocations of builtins may return failure objects
3176 // instead of a proper result. The builtin entry handles
3177 // this by performing a garbage collection and retrying the
3178 // builtin once.
3179
3180 // Enter the exit frame that transitions from JavaScript to C++.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003181#ifdef _WIN64
3182 int arg_stack_space = (result_size_ < 2 ? 2 : 4);
3183#else
3184 int arg_stack_space = 0;
3185#endif
Steve Block1e0659c2011-05-24 12:43:12 +01003186 __ EnterExitFrame(arg_stack_space, save_doubles_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003187
3188 // rax: Holds the context at this point, but should not be used.
3189 // On entry to code generated by GenerateCore, it must hold
3190 // a failure result if the collect_garbage argument to GenerateCore
3191 // is true. This failure result can be the result of code
3192 // generated by a previous call to GenerateCore. The value
3193 // of rax is then passed to Runtime::PerformGC.
3194 // rbx: pointer to builtin function (C callee-saved).
3195 // rbp: frame pointer of exit frame (restored after C call).
3196 // rsp: stack pointer (restored after C call).
3197 // r14: number of arguments including receiver (C callee-saved).
Steve Block44f0eee2011-05-26 01:26:41 +01003198 // r15: argv pointer (C callee-saved).
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003199
3200 Label throw_normal_exception;
3201 Label throw_termination_exception;
3202 Label throw_out_of_memory_exception;
3203
3204 // Call into the runtime system.
3205 GenerateCore(masm,
3206 &throw_normal_exception,
3207 &throw_termination_exception,
3208 &throw_out_of_memory_exception,
3209 false,
3210 false);
3211
3212 // Do space-specific GC and retry runtime call.
3213 GenerateCore(masm,
3214 &throw_normal_exception,
3215 &throw_termination_exception,
3216 &throw_out_of_memory_exception,
3217 true,
3218 false);
3219
3220 // Do full GC and retry runtime call one final time.
3221 Failure* failure = Failure::InternalError();
3222 __ movq(rax, failure, RelocInfo::NONE);
3223 GenerateCore(masm,
3224 &throw_normal_exception,
3225 &throw_termination_exception,
3226 &throw_out_of_memory_exception,
3227 true,
3228 true);
3229
3230 __ bind(&throw_out_of_memory_exception);
3231 GenerateThrowUncatchable(masm, OUT_OF_MEMORY);
3232
3233 __ bind(&throw_termination_exception);
3234 GenerateThrowUncatchable(masm, TERMINATION);
3235
3236 __ bind(&throw_normal_exception);
3237 GenerateThrowTOS(masm);
3238}
3239
3240
3241void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
3242 Label invoke, exit;
3243#ifdef ENABLE_LOGGING_AND_PROFILING
3244 Label not_outermost_js, not_outermost_js_2;
3245#endif
Steve Block44f0eee2011-05-26 01:26:41 +01003246 { // NOLINT. Scope block confuses linter.
3247 MacroAssembler::NoRootArrayScope uninitialized_root_register(masm);
3248 // Set up the frame.
3249 __ push(rbp);
3250 __ movq(rbp, rsp);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003251
Steve Block44f0eee2011-05-26 01:26:41 +01003252 // Push the stack frame type marker twice.
3253 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
3254 // The scratch register is neither callee-saved nor an argument register on
3255 // any platform. It's free to use at this point.
3256 // Cannot use smi-register for loading yet.
3257 __ movq(kScratchRegister,
3258 reinterpret_cast<uint64_t>(Smi::FromInt(marker)),
3259 RelocInfo::NONE);
3260 __ push(kScratchRegister); // context slot
3261 __ push(kScratchRegister); // function slot
3262 // Save callee-saved registers (X64/Win64 calling conventions).
3263 __ push(r12);
3264 __ push(r13);
3265 __ push(r14);
3266 __ push(r15);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003267#ifdef _WIN64
Steve Block44f0eee2011-05-26 01:26:41 +01003268 __ push(rdi); // Only callee-saved in Win64 ABI, argument in AMD64 ABI.
3269 __ push(rsi); // Only callee-saved in Win64 ABI, argument in AMD64 ABI.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003270#endif
Steve Block44f0eee2011-05-26 01:26:41 +01003271 __ push(rbx);
3272 // TODO(X64): On Win64, if we ever use XMM6-XMM15, the low 64 bits are
3273 // callee-saved as well.
3274
3275 // Set up the roots and smi constant registers.
3276 // Needs to be done before any further smi loads.
3277 __ InitializeSmiConstantRegister();
3278 __ InitializeRootRegister();
3279 }
3280
3281 Isolate* isolate = masm->isolate();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003282
3283 // Save copies of the top frame descriptor on the stack.
Steve Block44f0eee2011-05-26 01:26:41 +01003284 ExternalReference c_entry_fp(Isolate::k_c_entry_fp_address, isolate);
3285 {
3286 Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
3287 __ push(c_entry_fp_operand);
3288 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003289
3290#ifdef ENABLE_LOGGING_AND_PROFILING
3291 // If this is the outermost JS call, set js_entry_sp value.
Steve Block44f0eee2011-05-26 01:26:41 +01003292 ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address, isolate);
3293 __ Load(rax, js_entry_sp);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003294 __ testq(rax, rax);
3295 __ j(not_zero, &not_outermost_js);
Steve Block053d10c2011-06-13 19:13:29 +01003296 __ Push(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003297 __ movq(rax, rbp);
Steve Block44f0eee2011-05-26 01:26:41 +01003298 __ Store(js_entry_sp, rax);
Steve Block053d10c2011-06-13 19:13:29 +01003299 Label cont;
3300 __ jmp(&cont);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003301 __ bind(&not_outermost_js);
Steve Block053d10c2011-06-13 19:13:29 +01003302 __ Push(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME));
3303 __ bind(&cont);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003304#endif
3305
3306 // Call a faked try-block that does the invoke.
3307 __ call(&invoke);
3308
3309 // Caught exception: Store result (exception) in the pending
3310 // exception field in the JSEnv and return a failure sentinel.
Steve Block44f0eee2011-05-26 01:26:41 +01003311 ExternalReference pending_exception(Isolate::k_pending_exception_address,
3312 isolate);
3313 __ Store(pending_exception, rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003314 __ movq(rax, Failure::Exception(), RelocInfo::NONE);
3315 __ jmp(&exit);
3316
3317 // Invoke: Link this frame into the handler chain.
3318 __ bind(&invoke);
3319 __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
3320
3321 // Clear any pending exceptions.
Steve Block44f0eee2011-05-26 01:26:41 +01003322 __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
3323 __ Store(pending_exception, rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003324
3325 // Fake a receiver (NULL).
3326 __ push(Immediate(0)); // receiver
3327
3328 // Invoke the function by calling through JS entry trampoline
3329 // builtin and pop the faked function when we return. We load the address
3330 // from an external reference instead of inlining the call target address
3331 // directly in the code, because the builtin stubs may not have been
3332 // generated yet at the time this code is generated.
3333 if (is_construct) {
Steve Block44f0eee2011-05-26 01:26:41 +01003334 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
3335 isolate);
3336 __ Load(rax, construct_entry);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003337 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01003338 ExternalReference entry(Builtins::kJSEntryTrampoline, isolate);
3339 __ Load(rax, entry);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003340 }
3341 __ lea(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
3342 __ call(kScratchRegister);
3343
3344 // Unlink this frame from the handler chain.
Steve Block053d10c2011-06-13 19:13:29 +01003345 __ PopTryHandler();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003346
Steve Block053d10c2011-06-13 19:13:29 +01003347 __ bind(&exit);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003348#ifdef ENABLE_LOGGING_AND_PROFILING
Steve Block053d10c2011-06-13 19:13:29 +01003349 // Check if the current stack frame is marked as the outermost JS frame.
3350 __ pop(rbx);
3351 __ Cmp(rbx, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003352 __ j(not_equal, &not_outermost_js_2);
Steve Block053d10c2011-06-13 19:13:29 +01003353 __ movq(kScratchRegister, js_entry_sp);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003354 __ movq(Operand(kScratchRegister, 0), Immediate(0));
3355 __ bind(&not_outermost_js_2);
3356#endif
3357
3358 // Restore the top frame descriptor from the stack.
Steve Block053d10c2011-06-13 19:13:29 +01003359 { Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
Steve Block44f0eee2011-05-26 01:26:41 +01003360 __ pop(c_entry_fp_operand);
3361 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003362
3363 // Restore callee-saved registers (X64 conventions).
3364 __ pop(rbx);
3365#ifdef _WIN64
3366 // Callee-saved in Win64 ABI, arguments/volatile in AMD64 ABI.
3367 __ pop(rsi);
3368 __ pop(rdi);
3369#endif
3370 __ pop(r15);
3371 __ pop(r14);
3372 __ pop(r13);
3373 __ pop(r12);
3374 __ addq(rsp, Immediate(2 * kPointerSize)); // remove markers
3375
3376 // Restore frame pointer and return.
3377 __ pop(rbp);
3378 __ ret(0);
3379}
3380
3381
3382void InstanceofStub::Generate(MacroAssembler* masm) {
3383 // Implements "value instanceof function" operator.
Steve Block44f0eee2011-05-26 01:26:41 +01003384 // Expected input state with no inline cache:
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003385 // rsp[0] : return address
3386 // rsp[1] : function pointer
3387 // rsp[2] : value
Steve Block44f0eee2011-05-26 01:26:41 +01003388 // Expected input state with an inline one-element cache:
3389 // rsp[0] : return address
3390 // rsp[1] : offset from return address to location of inline cache
3391 // rsp[2] : function pointer
3392 // rsp[3] : value
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003393 // Returns a bitwise zero to indicate that the value
3394 // is an instance of the function, and anything else to
3395 // indicate that the value is not an instance.
3396
Ben Murdoch8b112d22011-06-08 16:22:53 +01003397 static const int kOffsetToMapCheckValue = 2;
3398 static const int kOffsetToResultValue = 18;
Steve Block44f0eee2011-05-26 01:26:41 +01003399 // The last 4 bytes of the instruction sequence
Ben Murdoch8b112d22011-06-08 16:22:53 +01003400 // movq(rdi, FieldOperand(rax, HeapObject::kMapOffset))
Steve Block44f0eee2011-05-26 01:26:41 +01003401 // Move(kScratchRegister, FACTORY->the_hole_value())
3402 // in front of the hole value address.
3403 static const unsigned int kWordBeforeMapCheckValue = 0xBA49FF78;
3404 // The last 4 bytes of the instruction sequence
3405 // __ j(not_equal, &cache_miss);
3406 // __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
3407 // before the offset of the hole value in the root array.
3408 static const unsigned int kWordBeforeResultValue = 0x458B4909;
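 // These signature words are the trailing bytes of the instructions
 // expected just before each patch site; the debug-mode checks below
 // compare the four bytes preceding the patch location against them to
 // catch drift in the inlined code sequence.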
3409 // Only the inline check flag is supported on X64.
3410 ASSERT(flags_ == kNoFlags || HasCallSiteInlineCheck());
3411 int extra_stack_space = HasCallSiteInlineCheck() ? kPointerSize : 0;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003412
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003413 // Get the object; go to the slow case if it's a smi.
3414 Label slow;
Steve Block44f0eee2011-05-26 01:26:41 +01003415
3416 __ movq(rax, Operand(rsp, 2 * kPointerSize + extra_stack_space));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003417 __ JumpIfSmi(rax, &slow);
3418
3419 // Check that the left hand is a JS object. Leave its map in rax.
3420 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rax);
3421 __ j(below, &slow);
3422 __ CmpInstanceType(rax, LAST_JS_OBJECT_TYPE);
3423 __ j(above, &slow);
3424
3425 // Get the prototype of the function.
Steve Block44f0eee2011-05-26 01:26:41 +01003426 __ movq(rdx, Operand(rsp, 1 * kPointerSize + extra_stack_space));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003427 // rdx is function, rax is map.
3428
Steve Block44f0eee2011-05-26 01:26:41 +01003429 // If there is a call site cache, don't look in the global cache, but do the
3430 // real lookup and update the call site cache.
3431 if (!HasCallSiteInlineCheck()) {
3432 // Look up the function and the map in the instanceof cache.
Ben Murdoch257744e2011-11-30 15:57:28 +00003433 Label miss;
Steve Block44f0eee2011-05-26 01:26:41 +01003434 __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
Ben Murdoch257744e2011-11-30 15:57:28 +00003435 __ j(not_equal, &miss, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01003436 __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex);
Ben Murdoch257744e2011-11-30 15:57:28 +00003437 __ j(not_equal, &miss, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01003438 __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
3439 __ ret(2 * kPointerSize);
3440 __ bind(&miss);
3441 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003442
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003443 __ TryGetFunctionPrototype(rdx, rbx, &slow);
3444
3445 // Check that the function prototype is a JS object.
3446 __ JumpIfSmi(rbx, &slow);
3447 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, kScratchRegister);
3448 __ j(below, &slow);
3449 __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE);
3450 __ j(above, &slow);
3451
3452 // Register mapping:
3453 // rax is object map.
3454 // rdx is function.
3455 // rbx is function prototype.
Steve Block44f0eee2011-05-26 01:26:41 +01003456 if (!HasCallSiteInlineCheck()) {
3457 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
3458 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex);
3459 } else {
3460 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
3461 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
3462 __ movq(Operand(kScratchRegister, kOffsetToMapCheckValue), rax);
3463 if (FLAG_debug_code) {
3464 __ movl(rdi, Immediate(kWordBeforeMapCheckValue));
3465 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi);
Ben Murdoch8b112d22011-06-08 16:22:53 +01003466 __ Assert(equal, "InstanceofStub unexpected call site cache (check).");
Steve Block44f0eee2011-05-26 01:26:41 +01003467 }
3468 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003469
3470 __ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset));
3471
3472 // Loop through the prototype chain looking for the function prototype.
Ben Murdoch257744e2011-11-30 15:57:28 +00003473 Label loop, is_instance, is_not_instance;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003474 __ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex);
3475 __ bind(&loop);
3476 __ cmpq(rcx, rbx);
Ben Murdoch257744e2011-11-30 15:57:28 +00003477 __ j(equal, &is_instance, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003478 __ cmpq(rcx, kScratchRegister);
3479 // The code at is_not_instance assumes that kScratchRegister contains a
3480 // non-zero GCable value (the null object in this case).
Ben Murdoch257744e2011-11-30 15:57:28 +00003481 __ j(equal, &is_not_instance, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003482 __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
3483 __ movq(rcx, FieldOperand(rcx, Map::kPrototypeOffset));
3484 __ jmp(&loop);
3485
3486 __ bind(&is_instance);
Steve Block44f0eee2011-05-26 01:26:41 +01003487 if (!HasCallSiteInlineCheck()) {
3488 __ xorl(rax, rax);
3489 // Store bitwise zero in the cache. This is a Smi in GC terms.
3490 STATIC_ASSERT(kSmiTag == 0);
3491 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
3492 } else {
3493 // Store offset of true in the root array at the inline check site.
3494 ASSERT((Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias
3495 == 0xB0 - 0x100);
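 // Worked out: -10 * kPointerSize = -0x50, which is encoded as the signed
 // byte 0xB0 (0xB0 - 0x100 = -0x50), the displacement of the true value
 // relative to the biased root register.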
3496 __ movl(rax, Immediate(0xB0)); // TrueValue is at -10 * kPointerSize.
3497 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
3498 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
3499 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
3500 if (FLAG_debug_code) {
3501 __ movl(rax, Immediate(kWordBeforeResultValue));
3502 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
Ben Murdoch8b112d22011-06-08 16:22:53 +01003503 __ Assert(equal, "InstanceofStub unexpected call site cache (mov).");
Steve Block44f0eee2011-05-26 01:26:41 +01003504 }
Ben Murdoch8b112d22011-06-08 16:22:53 +01003505 __ Set(rax, 0);
Steve Block44f0eee2011-05-26 01:26:41 +01003506 }
3507 __ ret(2 * kPointerSize + extra_stack_space);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003508
3509 __ bind(&is_not_instance);
Steve Block44f0eee2011-05-26 01:26:41 +01003510 if (!HasCallSiteInlineCheck()) {
3511 // We have to store a non-zero value in the cache.
3512 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex);
3513 } else {
3514 // Store offset of false in the root array at the inline check site.
3515 ASSERT((Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias
3516 == 0xB8 - 0x100);
3517 __ movl(rax, Immediate(0xB8)); // FalseValue is at -9 * kPointerSize.
3518 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
3519 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
3520 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
3521 if (FLAG_debug_code) {
3522 __ movl(rax, Immediate(kWordBeforeResultValue));
3523 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
3524 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
3525 }
3526 }
3527 __ ret(2 * kPointerSize + extra_stack_space);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003528
3529 // Slow-case: Go through the JavaScript implementation.
3530 __ bind(&slow);
Steve Block44f0eee2011-05-26 01:26:41 +01003531 if (HasCallSiteInlineCheck()) {
3532 // Remove extra value from the stack.
3533 __ pop(rcx);
3534 __ pop(rax);
3535 __ push(rcx);
3536 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003537 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
3538}
3539
3540
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003541// Passing arguments in registers is not supported.
3542Register InstanceofStub::left() { return no_reg; }
Steve Block1e0659c2011-05-24 12:43:12 +01003543
3544
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003545Register InstanceofStub::right() { return no_reg; }
Steve Block1e0659c2011-05-24 12:43:12 +01003546
3547
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003548int CompareStub::MinorKey() {
3549 // Encode the three parameters in a unique 16 bit value. To avoid duplicate
3550 // stubs the never NaN NaN condition is only taken into account if the
3551 // condition is equals.
3552 ASSERT(static_cast<unsigned>(cc_) < (1 << 12));
3553 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
3554 return ConditionField::encode(static_cast<unsigned>(cc_))
3555 | RegisterField::encode(false) // lhs_ and rhs_ are not used
3556 | StrictField::encode(strict_)
3557 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false)
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003558 | IncludeNumberCompareField::encode(include_number_compare_)
3559 | IncludeSmiCompareField::encode(include_smi_compare_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003560}
3561
3562
3563// Unfortunately you have to run without snapshots to see most of these
3564// names in the profile since most compare stubs end up in the snapshot.
3565const char* CompareStub::GetName() {
3566 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
3567
3568 if (name_ != NULL) return name_;
3569 const int kMaxNameLength = 100;
Steve Block44f0eee2011-05-26 01:26:41 +01003570 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
3571 kMaxNameLength);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003572 if (name_ == NULL) return "OOM";
3573
3574 const char* cc_name;
3575 switch (cc_) {
3576 case less: cc_name = "LT"; break;
3577 case greater: cc_name = "GT"; break;
3578 case less_equal: cc_name = "LE"; break;
3579 case greater_equal: cc_name = "GE"; break;
3580 case equal: cc_name = "EQ"; break;
3581 case not_equal: cc_name = "NE"; break;
3582 default: cc_name = "UnknownCondition"; break;
3583 }
3584
3585 const char* strict_name = "";
3586 if (strict_ && (cc_ == equal || cc_ == not_equal)) {
3587 strict_name = "_STRICT";
3588 }
3589
3590 const char* never_nan_nan_name = "";
3591 if (never_nan_nan_ && (cc_ == equal || cc_ == not_equal)) {
3592 never_nan_nan_name = "_NO_NAN";
3593 }
3594
3595 const char* include_number_compare_name = "";
3596 if (!include_number_compare_) {
3597 include_number_compare_name = "_NO_NUMBER";
3598 }
3599
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003600 const char* include_smi_compare_name = "";
3601 if (!include_smi_compare_) {
3602 include_smi_compare_name = "_NO_SMI";
3603 }
3604
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003605 OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
3606 "CompareStub_%s%s%s%s",
3607 cc_name,
3608 strict_name,
3609 never_nan_nan_name,
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003610 include_number_compare_name,
3611 include_smi_compare_name);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003612 return name_;
3613}
3614
3615
3616// -------------------------------------------------------------------------
3617// StringCharCodeAtGenerator
3618
3619void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
3620 Label flat_string;
3621 Label ascii_string;
3622 Label got_char_code;
3623
3624 // If the receiver is a smi trigger the non-string case.
3625 __ JumpIfSmi(object_, receiver_not_string_);
3626
3627 // Fetch the instance type of the receiver into result register.
3628 __ movq(result_, FieldOperand(object_, HeapObject::kMapOffset));
3629 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
3630 // If the receiver is not a string trigger the non-string case.
3631 __ testb(result_, Immediate(kIsNotStringMask));
3632 __ j(not_zero, receiver_not_string_);
3633
3634 // If the index is non-smi trigger the non-smi case.
3635 __ JumpIfNotSmi(index_, &index_not_smi_);
3636
3637 // Put smi-tagged index into scratch register.
3638 __ movq(scratch_, index_);
3639 __ bind(&got_smi_index_);
3640
3641 // Check for index out of range.
3642 __ SmiCompare(scratch_, FieldOperand(object_, String::kLengthOffset));
3643 __ j(above_equal, index_out_of_range_);
3644
3645 // We need special handling for non-flat strings.
3646 STATIC_ASSERT(kSeqStringTag == 0);
3647 __ testb(result_, Immediate(kStringRepresentationMask));
3648 __ j(zero, &flat_string);
3649
3650 // Handle non-flat strings.
3651 __ testb(result_, Immediate(kIsConsStringMask));
3652 __ j(zero, &call_runtime_);
3653
3654 // ConsString.
3655 // Check whether the right hand side is the empty string (i.e. if
3656 // this is really a flat string in a cons string). If that is not
3657 // the case we would rather go to the runtime system now to flatten
3658 // the string.
3659 __ CompareRoot(FieldOperand(object_, ConsString::kSecondOffset),
3660 Heap::kEmptyStringRootIndex);
3661 __ j(not_equal, &call_runtime_);
3662 // Get the first of the two strings and load its instance type.
3663 __ movq(object_, FieldOperand(object_, ConsString::kFirstOffset));
3664 __ movq(result_, FieldOperand(object_, HeapObject::kMapOffset));
3665 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
3666 // If the first cons component is also non-flat, then go to runtime.
3667 STATIC_ASSERT(kSeqStringTag == 0);
3668 __ testb(result_, Immediate(kStringRepresentationMask));
3669 __ j(not_zero, &call_runtime_);
3670
3671 // Check for 1-byte or 2-byte string.
3672 __ bind(&flat_string);
3673 STATIC_ASSERT(kAsciiStringTag != 0);
3674 __ testb(result_, Immediate(kStringEncodingMask));
3675 __ j(not_zero, &ascii_string);
3676
3677 // 2-byte string.
3678 // Load the 2-byte character code into the result register.
3679 __ SmiToInteger32(scratch_, scratch_);
3680 __ movzxwl(result_, FieldOperand(object_,
3681 scratch_, times_2,
3682 SeqTwoByteString::kHeaderSize));
3683 __ jmp(&got_char_code);
3684
3685 // ASCII string.
3686 // Load the byte into the result register.
3687 __ bind(&ascii_string);
3688 __ SmiToInteger32(scratch_, scratch_);
3689 __ movzxbl(result_, FieldOperand(object_,
3690 scratch_, times_1,
3691 SeqAsciiString::kHeaderSize));
3692 __ bind(&got_char_code);
3693 __ Integer32ToSmi(result_, result_);
3694 __ bind(&exit_);
3695}
3696
3697
3698void StringCharCodeAtGenerator::GenerateSlow(
3699 MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
3700 __ Abort("Unexpected fallthrough to CharCodeAt slow case");
3701
Ben Murdoch257744e2011-11-30 15:57:28 +00003702 Factory* factory = masm->isolate()->factory();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003703 // Index is not a smi.
3704 __ bind(&index_not_smi_);
3705 // If index is a heap number, try converting it to an integer.
Ben Murdoch257744e2011-11-30 15:57:28 +00003706 __ CheckMap(index_,
3707 factory->heap_number_map(),
3708 index_not_number_,
3709 DONT_DO_SMI_CHECK);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003710 call_helper.BeforeCall(masm);
3711 __ push(object_);
3712 __ push(index_);
3713 __ push(index_); // Consumed by runtime conversion function.
3714 if (index_flags_ == STRING_INDEX_IS_NUMBER) {
3715 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
3716 } else {
3717 ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
3718 // NumberToSmi discards numbers that are not exact integers.
3719 __ CallRuntime(Runtime::kNumberToSmi, 1);
3720 }
3721 if (!scratch_.is(rax)) {
3722 // Save the conversion result before the pop instructions below
3723 // have a chance to overwrite it.
3724 __ movq(scratch_, rax);
3725 }
3726 __ pop(index_);
3727 __ pop(object_);
3728 // Reload the instance type.
3729 __ movq(result_, FieldOperand(object_, HeapObject::kMapOffset));
3730 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
3731 call_helper.AfterCall(masm);
3732 // If index is still not a smi, it must be out of range.
3733 __ JumpIfNotSmi(scratch_, index_out_of_range_);
3734 // Otherwise, return to the fast path.
3735 __ jmp(&got_smi_index_);
3736
3737 // Call runtime. We get here when the receiver is a string and the
3738 // index is a number, but the code of getting the actual character
3739 // is too complex (e.g., when the string needs to be flattened).
3740 __ bind(&call_runtime_);
3741 call_helper.BeforeCall(masm);
3742 __ push(object_);
3743 __ push(index_);
3744 __ CallRuntime(Runtime::kStringCharCodeAt, 2);
3745 if (!result_.is(rax)) {
3746 __ movq(result_, rax);
3747 }
3748 call_helper.AfterCall(masm);
3749 __ jmp(&exit_);
3750
3751 __ Abort("Unexpected fallthrough from CharCodeAt slow case");
3752}
3753
3754
3755// -------------------------------------------------------------------------
3756// StringCharFromCodeGenerator
3757
3758void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
3759 // Fast case of Heap::LookupSingleCharacterStringFromCode.
3760 __ JumpIfNotSmi(code_, &slow_case_);
3761 __ SmiCompare(code_, Smi::FromInt(String::kMaxAsciiCharCode));
3762 __ j(above, &slow_case_);
3763
3764 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
3765 SmiIndex index = masm->SmiToIndex(kScratchRegister, code_, kPointerSizeLog2);
3766 __ movq(result_, FieldOperand(result_, index.reg, index.scale,
3767 FixedArray::kHeaderSize));
3768 __ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
3769 __ j(equal, &slow_case_);
3770 __ bind(&exit_);
3771}
3772
3773
3774void StringCharFromCodeGenerator::GenerateSlow(
3775 MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
3776 __ Abort("Unexpected fallthrough to CharFromCode slow case");
3777
3778 __ bind(&slow_case_);
3779 call_helper.BeforeCall(masm);
3780 __ push(code_);
3781 __ CallRuntime(Runtime::kCharFromCode, 1);
3782 if (!result_.is(rax)) {
3783 __ movq(result_, rax);
3784 }
3785 call_helper.AfterCall(masm);
3786 __ jmp(&exit_);
3787
3788 __ Abort("Unexpected fallthrough from CharFromCode slow case");
3789}
3790
3791
3792// -------------------------------------------------------------------------
3793// StringCharAtGenerator
3794
3795void StringCharAtGenerator::GenerateFast(MacroAssembler* masm) {
3796 char_code_at_generator_.GenerateFast(masm);
3797 char_from_code_generator_.GenerateFast(masm);
3798}
3799
3800
3801void StringCharAtGenerator::GenerateSlow(
3802 MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
3803 char_code_at_generator_.GenerateSlow(masm, call_helper);
3804 char_from_code_generator_.GenerateSlow(masm, call_helper);
3805}
3806
3807
3808void StringAddStub::Generate(MacroAssembler* masm) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003809 Label string_add_runtime, call_builtin;
3810 Builtins::JavaScript builtin_id = Builtins::ADD;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003811
3812 // Load the two arguments.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003813 __ movq(rax, Operand(rsp, 2 * kPointerSize)); // First argument (left).
3814 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); // Second argument (right).
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003815
3816 // Make sure that both arguments are strings if not known in advance.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003817 if (flags_ == NO_STRING_ADD_FLAGS) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003818 Condition is_smi;
3819 is_smi = masm->CheckSmi(rax);
3820 __ j(is_smi, &string_add_runtime);
3821 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, r8);
3822 __ j(above_equal, &string_add_runtime);
3823
3824 // First argument is a a string, test second.
3825 is_smi = masm->CheckSmi(rdx);
3826 __ j(is_smi, &string_add_runtime);
3827 __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, r9);
3828 __ j(above_equal, &string_add_runtime);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003829 } else {
3830 // Here at least one of the arguments is definitely a string.
3831 // We convert the one that is not known to be a string.
3832 if ((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) == 0) {
3833 ASSERT((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) != 0);
3834 GenerateConvertArgument(masm, 2 * kPointerSize, rax, rbx, rcx, rdi,
3835 &call_builtin);
3836 builtin_id = Builtins::STRING_ADD_RIGHT;
3837 } else if ((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) == 0) {
3838 ASSERT((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) != 0);
3839 GenerateConvertArgument(masm, 1 * kPointerSize, rdx, rbx, rcx, rdi,
3840 &call_builtin);
3841 builtin_id = Builtins::STRING_ADD_LEFT;
3842 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003843 }

  // Both arguments are strings.
  // rax: first string
  // rdx: second string
  // Check if either of the strings is empty. In that case return the other.
  Label second_not_zero_length, both_not_zero_length;
  __ movq(rcx, FieldOperand(rdx, String::kLengthOffset));
  __ SmiTest(rcx);
  __ j(not_zero, &second_not_zero_length, Label::kNear);
  // Second string is empty, result is first string which is already in rax.
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);
  __ bind(&second_not_zero_length);
  __ movq(rbx, FieldOperand(rax, String::kLengthOffset));
  __ SmiTest(rbx);
  __ j(not_zero, &both_not_zero_length, Label::kNear);
  // First string is empty, result is second string which is in rdx.
  __ movq(rax, rdx);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  // Both strings are non-empty.
  // rax: first string
  // rbx: length of first string
  // rcx: length of second string
  // rdx: second string
  // r8: map of first string (if flags_ == NO_STRING_ADD_FLAGS)
  // r9: map of second string (if flags_ == NO_STRING_ADD_FLAGS)
  Label string_add_flat_result, longer_than_two;
  __ bind(&both_not_zero_length);

  // If arguments were known to be strings, maps are not loaded to r8 and r9
  // by the code above.
  if (flags_ != NO_STRING_ADD_FLAGS) {
    __ movq(r8, FieldOperand(rax, HeapObject::kMapOffset));
    __ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset));
  }
  // Get the instance types of the two strings as they will be needed soon.
  __ movzxbl(r8, FieldOperand(r8, Map::kInstanceTypeOffset));
  __ movzxbl(r9, FieldOperand(r9, Map::kInstanceTypeOffset));

  // Look at the length of the result of adding the two strings.
  STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue / 2);
  __ SmiAdd(rbx, rbx, rcx);
  // Use the symbol table when adding two one-character strings, as it
  // helps later optimizations to return a symbol here.
  __ SmiCompare(rbx, Smi::FromInt(2));
  __ j(not_equal, &longer_than_two);

  // Check that both strings are non-external ascii strings.
  __ JumpIfBothInstanceTypesAreNotSequentialAscii(r8, r9, rbx, rcx,
                                                  &string_add_runtime);

  // Get the two characters forming the sub string.
  __ movzxbq(rbx, FieldOperand(rax, SeqAsciiString::kHeaderSize));
  __ movzxbq(rcx, FieldOperand(rdx, SeqAsciiString::kHeaderSize));

  // Try to look up the two-character string in the symbol table. If it is
  // not found, just allocate a new one.
  Label make_two_character_string, make_flat_ascii_string;
  StringHelper::GenerateTwoCharacterSymbolTableProbe(
      masm, rbx, rcx, r14, r11, rdi, r15, &make_two_character_string);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  __ bind(&make_two_character_string);
  __ Set(rbx, 2);
  __ jmp(&make_flat_ascii_string);

  __ bind(&longer_than_two);
  // Check if resulting string will be flat.
  __ SmiCompare(rbx, Smi::FromInt(String::kMinNonFlatLength));
  __ j(below, &string_add_flat_result);
  // Handle exceptionally long strings in the runtime system.
  STATIC_ASSERT((String::kMaxLength & 0x80000000) == 0);
  __ SmiCompare(rbx, Smi::FromInt(String::kMaxLength));
  __ j(above, &string_add_runtime);

  // If result is not supposed to be flat, allocate a cons string object. If
  // both strings are ascii the result is an ascii cons string.
  // rax: first string
  // rbx: length of resulting flat string
  // rdx: second string
  // r8: instance type of first string
  // r9: instance type of second string
  Label non_ascii, allocated, ascii_data;
  __ movl(rcx, r8);
  __ and_(rcx, r9);
  STATIC_ASSERT(kStringEncodingMask == kAsciiStringTag);
  __ testl(rcx, Immediate(kAsciiStringTag));
  __ j(zero, &non_ascii);
  __ bind(&ascii_data);
  // Allocate an ascii cons string.
  __ AllocateAsciiConsString(rcx, rdi, no_reg, &string_add_runtime);
  __ bind(&allocated);
  // Fill the fields of the cons string.
  __ movq(FieldOperand(rcx, ConsString::kLengthOffset), rbx);
  __ movq(FieldOperand(rcx, ConsString::kHashFieldOffset),
          Immediate(String::kEmptyHashField));
  __ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax);
  __ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx);
  __ movq(rax, rcx);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);
  __ bind(&non_ascii);
  // At least one of the strings is two-byte. Check whether it happens
  // to contain only ascii characters.
  // rcx: first instance type AND second instance type.
  // r8: first instance type.
  // r9: second instance type.
  __ testb(rcx, Immediate(kAsciiDataHintMask));
  __ j(not_zero, &ascii_data);
  __ xor_(r8, r9);
  STATIC_ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0);
  __ andb(r8, Immediate(kAsciiStringTag | kAsciiDataHintTag));
  __ cmpb(r8, Immediate(kAsciiStringTag | kAsciiDataHintTag));
  __ j(equal, &ascii_data);
  // Allocate a two byte cons string.
  __ AllocateConsString(rcx, rdi, no_reg, &string_add_runtime);
  __ jmp(&allocated);

  // Handle creating a flat result. First check that both strings are not
  // external strings.
  // rax: first string
  // rbx: length of resulting flat string as smi
  // rdx: second string
  // r8: instance type of first string
  // r9: instance type of second string
  __ bind(&string_add_flat_result);
  __ SmiToInteger32(rbx, rbx);
  __ movl(rcx, r8);
  __ and_(rcx, Immediate(kStringRepresentationMask));
  __ cmpl(rcx, Immediate(kExternalStringTag));
  __ j(equal, &string_add_runtime);
  __ movl(rcx, r9);
  __ and_(rcx, Immediate(kStringRepresentationMask));
  __ cmpl(rcx, Immediate(kExternalStringTag));
  __ j(equal, &string_add_runtime);
  // Now check if both strings are ascii strings.
  // rax: first string
  // rbx: length of resulting flat string
  // rdx: second string
  // r8: instance type of first string
  // r9: instance type of second string
  Label non_ascii_string_add_flat_result;
  STATIC_ASSERT(kStringEncodingMask == kAsciiStringTag);
  __ testl(r8, Immediate(kAsciiStringTag));
  __ j(zero, &non_ascii_string_add_flat_result);
  __ testl(r9, Immediate(kAsciiStringTag));
  __ j(zero, &string_add_runtime);

  __ bind(&make_flat_ascii_string);
  // Both strings are ascii strings. As they are short they are both flat.
  __ AllocateAsciiString(rcx, rbx, rdi, r14, r11, &string_add_runtime);
  // rcx: result string
  __ movq(rbx, rcx);
  // Locate first character of result.
  __ addq(rcx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // Locate first character of first argument.
  __ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset));
  __ addq(rax, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // rax: first char of first argument
  // rbx: result string
  // rcx: first character of result
  // rdx: second string
  // rdi: length of first argument
  StringHelper::GenerateCopyCharacters(masm, rcx, rax, rdi, true);
  // Locate first character of second argument.
  __ SmiToInteger32(rdi, FieldOperand(rdx, String::kLengthOffset));
  __ addq(rdx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // rbx: result string
  // rcx: next character of result
  // rdx: first char of second argument
  // rdi: length of second argument
  StringHelper::GenerateCopyCharacters(masm, rcx, rdx, rdi, true);
  __ movq(rax, rbx);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  // Handle creating a flat two byte result.
  // rax: first string - known to be two byte
  // rbx: length of resulting flat string
  // rdx: second string
  // r8: instance type of first string
  // r9: instance type of second string
  __ bind(&non_ascii_string_add_flat_result);
  __ and_(r9, Immediate(kAsciiStringTag));
  __ j(not_zero, &string_add_runtime);
  // Both strings are two byte strings. As they are short they are both
  // flat.
  __ AllocateTwoByteString(rcx, rbx, rdi, r14, r11, &string_add_runtime);
  // rcx: result string
  __ movq(rbx, rcx);
  // Locate first character of result.
  __ addq(rcx, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // Locate first character of first argument.
  __ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset));
  __ addq(rax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // rax: first char of first argument
  // rbx: result string
  // rcx: first character of result
  // rdx: second argument
  // rdi: length of first argument
  StringHelper::GenerateCopyCharacters(masm, rcx, rax, rdi, false);
  // Locate first character of second argument.
  __ SmiToInteger32(rdi, FieldOperand(rdx, String::kLengthOffset));
  __ addq(rdx, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // rbx: result string
  // rcx: next character of result
  // rdx: first char of second argument
  // rdi: length of second argument
  StringHelper::GenerateCopyCharacters(masm, rcx, rdx, rdi, false);
  __ movq(rax, rbx);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  // Just jump to runtime to add the two strings.
  __ bind(&string_add_runtime);
  __ TailCallRuntime(Runtime::kStringAdd, 2, 1);

  if (call_builtin.is_linked()) {
    __ bind(&call_builtin);
    __ InvokeBuiltin(builtin_id, JUMP_FUNCTION);
  }
}

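// For orientation, a minimal C++ sketch of the fast-path selection the stub
// implements above (illustrative only; the enum and function below are not
// part of V8, and the two threshold parameters mirror the String constants
// the generated code compares against):
enum StringAddPath {
  RETURN_FIRST,      // Second string is empty.
  RETURN_SECOND,     // First string is empty.
  TWO_CHAR_SYMBOL,   // Probe the symbol table for the two characters.
  FLAT_STRING,       // Short result: allocate and copy both halves.
  RUNTIME_CALL,      // Exceptionally long result.
  CONS_STRING        // O(1) cons cell; the copy is deferred.
};
static StringAddPath ChooseStringAddPath(int first_length, int second_length,
                                         int min_non_flat_length,
                                         int max_length) {
  if (second_length == 0) return RETURN_FIRST;
  if (first_length == 0) return RETURN_SECOND;
  int length = first_length + second_length;
  if (length == 2) return TWO_CHAR_SYMBOL;
  if (length < min_non_flat_length) return FLAT_STRING;
  if (length > max_length) return RUNTIME_CALL;
  return CONS_STRING;
}
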
void StringAddStub::GenerateConvertArgument(MacroAssembler* masm,
                                            int stack_offset,
                                            Register arg,
                                            Register scratch1,
                                            Register scratch2,
                                            Register scratch3,
                                            Label* slow) {
  // First check if the argument is already a string.
  Label not_string, done;
  __ JumpIfSmi(arg, &not_string);
  __ CmpObjectType(arg, FIRST_NONSTRING_TYPE, scratch1);
  __ j(below, &done);

  // Check the number to string cache.
  Label not_cached;
  __ bind(&not_string);
  // Puts the cached result into scratch1.
  NumberToStringStub::GenerateLookupNumberStringCache(masm,
                                                      arg,
                                                      scratch1,
                                                      scratch2,
                                                      scratch3,
                                                      false,
                                                      &not_cached);
  __ movq(arg, scratch1);
  __ movq(Operand(rsp, stack_offset), arg);
  __ jmp(&done);

  // Check if the argument is a safe string wrapper.
  __ bind(&not_cached);
  __ JumpIfSmi(arg, slow);
  __ CmpObjectType(arg, JS_VALUE_TYPE, scratch1);  // map -> scratch1.
  __ j(not_equal, slow);
  __ testb(FieldOperand(scratch1, Map::kBitField2Offset),
           Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
  __ j(zero, slow);
  __ movq(arg, FieldOperand(arg, JSValue::kValueOffset));
  __ movq(Operand(rsp, stack_offset), arg);

  __ bind(&done);
}

void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
                                          Register dest,
                                          Register src,
                                          Register count,
                                          bool ascii) {
  Label loop;
  __ bind(&loop);
  // This loop just copies one character at a time, as it is only used for
  // very short strings.
  if (ascii) {
    __ movb(kScratchRegister, Operand(src, 0));
    __ movb(Operand(dest, 0), kScratchRegister);
    __ incq(src);
    __ incq(dest);
  } else {
    __ movzxwl(kScratchRegister, Operand(src, 0));
    __ movw(Operand(dest, 0), kScratchRegister);
    __ addq(src, Immediate(2));
    __ addq(dest, Immediate(2));
  }
  __ decl(count);
  __ j(not_zero, &loop);
}

void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
                                             Register dest,
                                             Register src,
                                             Register count,
                                             bool ascii) {
  // Copy characters using rep movs of quadwords. Copy remaining characters
  // after running rep movs.
  // Count is positive int32, dest and src are character pointers.
  ASSERT(dest.is(rdi));  // rep movs destination
  ASSERT(src.is(rsi));  // rep movs source
  ASSERT(count.is(rcx));  // rep movs count

  // Nothing to do for zero characters.
  Label done;
  __ testl(count, count);
  __ j(zero, &done, Label::kNear);

  // Make count the number of bytes to copy.
  if (!ascii) {
    STATIC_ASSERT(2 == sizeof(uc16));
    __ addl(count, count);
  }

  // Don't enter the rep movs if there are fewer than 8 bytes to copy.
  Label last_bytes;
  __ testl(count, Immediate(~7));
  __ j(zero, &last_bytes, Label::kNear);

  // Copy from rsi to rdi using the rep movs instruction.
  __ movl(kScratchRegister, count);
  __ shr(count, Immediate(3));  // Number of quadwords to copy.
  __ repmovsq();

  // Find number of bytes left.
  __ movl(count, kScratchRegister);
  __ and_(count, Immediate(7));

  // Check if there are more bytes to copy.
  __ bind(&last_bytes);
  __ testl(count, count);
  __ j(zero, &done, Label::kNear);

  // Copy remaining characters.
  Label loop;
  __ bind(&loop);
  __ movb(kScratchRegister, Operand(src, 0));
  __ movb(Operand(dest, 0), kScratchRegister);
  __ incq(src);
  __ incq(dest);
  __ decl(count);
  __ j(not_zero, &loop);

  __ bind(&done);
}

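// The strategy above, as a plain C++ sketch (illustrative only; assumes the
// standard <string.h>/<stdint.h> declarations are available through the
// existing includes): bulk-copy eight bytes at a time, then finish the
// remaining count & 7 bytes one by one, mirroring the repmovsq / tail-loop
// split in the generated code.
static void CopyBytesSketch(char* dest, const char* src, uint32_t count) {
  uint32_t quads = count >> 3;   // Number of quadwords, as for rep movsq.
  memcpy(dest, src, quads * 8);  // The rep movsq part.
  for (uint32_t i = quads * 8; i < count; i++) {  // The count & 7 tail bytes.
    dest[i] = src[i];
  }
}
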
void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
                                                        Register c1,
                                                        Register c2,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3,
                                                        Register scratch4,
                                                        Label* not_found) {
  // Register scratch3 is the general scratch register in this function.
  Register scratch = scratch3;

  // Make sure that both characters are not digits, as such strings have a
  // different hash algorithm. Don't try to look for these in the symbol
  // table.
  Label not_array_index;
  __ leal(scratch, Operand(c1, -'0'));
  __ cmpl(scratch, Immediate(static_cast<int>('9' - '0')));
  __ j(above, &not_array_index, Label::kNear);
  __ leal(scratch, Operand(c2, -'0'));
  __ cmpl(scratch, Immediate(static_cast<int>('9' - '0')));
  __ j(below_equal, not_found);

  __ bind(&not_array_index);
  // Calculate the two character string hash.
  Register hash = scratch1;
  GenerateHashInit(masm, hash, c1, scratch);
  GenerateHashAddCharacter(masm, hash, c2, scratch);
  GenerateHashGetHash(masm, hash, scratch);

  // Collect the two characters in a register.
  Register chars = c1;
  __ shl(c2, Immediate(kBitsPerByte));
  __ orl(chars, c2);

  // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
  // hash: hash of two character string.

  // Load the symbol table.
  Register symbol_table = c2;
  __ LoadRoot(symbol_table, Heap::kSymbolTableRootIndex);

  // Calculate capacity mask from the symbol table capacity.
  Register mask = scratch2;
  __ SmiToInteger32(mask,
                    FieldOperand(symbol_table, SymbolTable::kCapacityOffset));
  __ decl(mask);

  Register map = scratch4;

  // Registers
  // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
  // hash: hash of two character string (32-bit int)
  // symbol_table: symbol table
  // mask: capacity mask (32-bit int)
  // map: -
  // scratch: -

  // Perform a number of probes in the symbol table.
  static const int kProbes = 4;
  Label found_in_symbol_table;
  Label next_probe[kProbes];
  for (int i = 0; i < kProbes; i++) {
    // Calculate entry in symbol table.
    __ movl(scratch, hash);
    if (i > 0) {
      __ addl(scratch, Immediate(SymbolTable::GetProbeOffset(i)));
    }
    __ andl(scratch, mask);

    // Load the entry from the symbol table.
    Register candidate = scratch;  // Scratch register contains candidate.
    STATIC_ASSERT(SymbolTable::kEntrySize == 1);
    __ movq(candidate,
            FieldOperand(symbol_table,
                         scratch,
                         times_pointer_size,
                         SymbolTable::kElementsStartOffset));

    // If entry is undefined no string with this hash can be found.
    Label is_string;
    __ CmpObjectType(candidate, ODDBALL_TYPE, map);
    __ j(not_equal, &is_string, Label::kNear);

    __ CompareRoot(candidate, Heap::kUndefinedValueRootIndex);
    __ j(equal, not_found);
    // Must be null (deleted entry).
    __ jmp(&next_probe[i]);

    __ bind(&is_string);

    // If length is not 2 the string is not a candidate.
    __ SmiCompare(FieldOperand(candidate, String::kLengthOffset),
                  Smi::FromInt(2));
    __ j(not_equal, &next_probe[i]);

    // We use kScratchRegister as a temporary register on the assumption
    // that JumpIfInstanceTypeIsNotSequentialAscii does not use it
    // implicitly.
    Register temp = kScratchRegister;

    // Check that the candidate is a non-external ascii string.
    __ movzxbl(temp, FieldOperand(map, Map::kInstanceTypeOffset));
    __ JumpIfInstanceTypeIsNotSequentialAscii(
        temp, temp, &next_probe[i]);

    // Check if the two characters match.
    __ movl(temp, FieldOperand(candidate, SeqAsciiString::kHeaderSize));
    __ andl(temp, Immediate(0x0000ffff));
    __ cmpl(chars, temp);
    __ j(equal, &found_in_symbol_table);
    __ bind(&next_probe[i]);
  }

  // No matching 2 character string found by probing.
  __ jmp(not_found);

  // Scratch register contains result when we fall through to here.
  Register result = scratch;
  __ bind(&found_in_symbol_table);
  if (!result.is(rax)) {
    __ movq(rax, result);
  }
}

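// The final candidate test above packs both characters into one integer and
// compares it against the first two bytes of the candidate string. The same
// test as a C++ sketch (illustrative only; not part of V8):
static bool MatchesTwoCharStringSketch(const uint8_t* candidate_chars,
                                       uint32_t c1, uint32_t c2) {
  // char 1 in byte 0 and char 2 in byte 1, as in the probe above.
  uint32_t chars = c1 | (c2 << kBitsPerByte);
  uint32_t first_two =
      candidate_chars[0] | (candidate_chars[1] << kBitsPerByte);
  return chars == first_two;
}
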
void StringHelper::GenerateHashInit(MacroAssembler* masm,
                                    Register hash,
                                    Register character,
                                    Register scratch) {
  // hash = character + (character << 10);
  __ movl(hash, character);
  __ shll(hash, Immediate(10));
  __ addl(hash, character);
  // hash ^= hash >> 6;
  __ movl(scratch, hash);
  __ sarl(scratch, Immediate(6));
  __ xorl(hash, scratch);
}


void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
                                            Register hash,
                                            Register character,
                                            Register scratch) {
  // hash += character;
  __ addl(hash, character);
  // hash += hash << 10;
  __ movl(scratch, hash);
  __ shll(scratch, Immediate(10));
  __ addl(hash, scratch);
  // hash ^= hash >> 6;
  __ movl(scratch, hash);
  __ sarl(scratch, Immediate(6));
  __ xorl(hash, scratch);
}


void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
                                       Register hash,
                                       Register scratch) {
  // hash += hash << 3;
  __ leal(hash, Operand(hash, hash, times_8, 0));
  // hash ^= hash >> 11;
  __ movl(scratch, hash);
  __ sarl(scratch, Immediate(11));
  __ xorl(hash, scratch);
  // hash += hash << 15;
  __ movl(scratch, hash);
  __ shll(scratch, Immediate(15));
  __ addl(hash, scratch);

  // if (hash == 0) hash = 27;
  Label hash_not_zero;
  __ j(not_zero, &hash_not_zero);
  __ Set(hash, 27);
  __ bind(&hash_not_zero);
}

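// Taken together, the three helpers above compute the following running
// hash (a plain C++ sketch, illustrative only). The sketch uses unsigned,
// i.e. logical, right shifts, while the stub emits arithmetic shifts; the
// two only differ if an intermediate value exceeds 2^31.
static uint32_t StringHashSketch(const uint8_t* chars, int length) {
  uint32_t hash = 0;
  for (int i = 0; i < length; i++) {
    // GenerateHashInit / GenerateHashAddCharacter.
    hash += chars[i];
    hash += hash << 10;
    hash ^= hash >> 6;
  }
  // GenerateHashGetHash.
  hash += hash << 3;
  hash ^= hash >> 11;
  hash += hash << 15;
  if (hash == 0) hash = 27;  // Matches the stub's zero-hash fixup.
  return hash;
}
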
void SubStringStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  // rsp[0]: return address
  // rsp[8]: to
  // rsp[16]: from
  // rsp[24]: string

  const int kToOffset = 1 * kPointerSize;
  const int kFromOffset = kToOffset + kPointerSize;
  const int kStringOffset = kFromOffset + kPointerSize;
  const int kArgumentsSize = (kStringOffset + kPointerSize) - kToOffset;

  // Make sure first argument is a string.
  __ movq(rax, Operand(rsp, kStringOffset));
  STATIC_ASSERT(kSmiTag == 0);
  __ testl(rax, Immediate(kSmiTagMask));
  __ j(zero, &runtime);
  Condition is_string = masm->IsObjectStringType(rax, rbx, rbx);
  __ j(NegateCondition(is_string), &runtime);

  // rax: string
  // rbx: instance type
  // Calculate length of sub string using the smi values.
  Label result_longer_than_two;
  __ movq(rcx, Operand(rsp, kToOffset));
  __ movq(rdx, Operand(rsp, kFromOffset));
  __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime);

  __ SmiSub(rcx, rcx, rdx);  // Overflow doesn't happen.
  __ cmpq(FieldOperand(rax, String::kLengthOffset), rcx);
  Label return_rax;
  __ j(equal, &return_rax);
  // Special handling of sub-strings of length 1 and 2. One character strings
  // are handled in the runtime system (looked up in the single character
  // cache). Two character strings are looked for in the symbol cache.
  __ SmiToInteger32(rcx, rcx);
  __ cmpl(rcx, Immediate(2));
  __ j(greater, &result_longer_than_two);
  __ j(less, &runtime);

  // Sub string of length 2 requested.
  // rax: string
  // rbx: instance type
  // rcx: sub string length (value is 2)
  // rdx: from index (smi)
  __ JumpIfInstanceTypeIsNotSequentialAscii(rbx, rbx, &runtime);

  // Get the two characters forming the sub string.
  __ SmiToInteger32(rdx, rdx);  // From index is no longer smi.
  __ movzxbq(rbx, FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize));
  __ movzxbq(rcx,
             FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize + 1));

  // Try to look up the two-character string in the symbol table.
  Label make_two_character_string;
  StringHelper::GenerateTwoCharacterSymbolTableProbe(
      masm, rbx, rcx, rax, rdx, rdi, r14, &make_two_character_string);
  __ ret(3 * kPointerSize);

  __ bind(&make_two_character_string);
  // Set up registers for allocating the two character string.
  __ movq(rax, Operand(rsp, kStringOffset));
  __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
  __ Set(rcx, 2);

  __ bind(&result_longer_than_two);

  // rax: string
  // rbx: instance type
  // rcx: result string length
  // Check for flat ascii string.
  Label non_ascii_flat;
  __ JumpIfInstanceTypeIsNotSequentialAscii(rbx, rbx, &non_ascii_flat);

  // Allocate the result.
  __ AllocateAsciiString(rax, rcx, rbx, rdx, rdi, &runtime);

  // rax: result string
  // rcx: result string length
  __ movq(rdx, rsi);  // rsi used by following code.
  // Locate first character of result.
  __ lea(rdi, FieldOperand(rax, SeqAsciiString::kHeaderSize));
  // Load string argument and locate character of sub string start.
  __ movq(rsi, Operand(rsp, kStringOffset));
  __ movq(rbx, Operand(rsp, kFromOffset));
  {
    SmiIndex smi_as_index = masm->SmiToIndex(rbx, rbx, times_1);
    __ lea(rsi, Operand(rsi, smi_as_index.reg, smi_as_index.scale,
                        SeqAsciiString::kHeaderSize - kHeapObjectTag));
  }

  // rax: result string
  // rcx: result length
  // rdx: original value of rsi
  // rdi: first character of result
  // rsi: character of sub string start
  StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, true);
  __ movq(rsi, rdx);  // Restore rsi.
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(kArgumentsSize);

  __ bind(&non_ascii_flat);
  // rax: string
  // rbx: instance type & kStringRepresentationMask | kStringEncodingMask
  // rcx: result string length
  // Check for sequential two byte string.
  __ cmpb(rbx, Immediate(kSeqStringTag | kTwoByteStringTag));
  __ j(not_equal, &runtime);

  // Allocate the result.
  __ AllocateTwoByteString(rax, rcx, rbx, rdx, rdi, &runtime);

  // rax: result string
  // rcx: result string length
  __ movq(rdx, rsi);  // rsi used by following code.
  // Locate first character of result.
  __ lea(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize));
  // Load string argument and locate character of sub string start.
  __ movq(rsi, Operand(rsp, kStringOffset));
  __ movq(rbx, Operand(rsp, kFromOffset));
  {
    SmiIndex smi_as_index = masm->SmiToIndex(rbx, rbx, times_2);
    __ lea(rsi, Operand(rsi, smi_as_index.reg, smi_as_index.scale,
                        SeqAsciiString::kHeaderSize - kHeapObjectTag));
  }

  // rax: result string
  // rcx: result length
  // rdx: original value of rsi
  // rdi: first character of result
  // rsi: character of sub string start
  StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, false);
  __ movq(rsi, rdx);  // Restore rsi.

  __ bind(&return_rax);
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(kArgumentsSize);

  // Just jump to runtime to create the sub string.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kSubString, 3, 1);
}

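// A minimal C++ sketch of the dispatch above (illustrative only; not part
// of V8). |from| and |to| are the untagged substring bounds:
enum SubStringPath {
  WHOLE_STRING,     // Requested range covers the entire string.
  RUNTIME_CALL,     // Length 0/1 (single chars use the runtime cache).
  TWO_CHAR_LOOKUP,  // Probe the symbol table before allocating.
  COPY              // Allocate a sequential result and rep-movs the chars.
};
static SubStringPath ChooseSubStringPath(int string_length, int from, int to) {
  int length = to - from;
  if (length == string_length) return WHOLE_STRING;
  if (length < 2) return RUNTIME_CALL;
  if (length == 2) return TWO_CHAR_LOOKUP;
  return COPY;
}
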
void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm,
                                                      Register left,
                                                      Register right,
                                                      Register scratch1,
                                                      Register scratch2) {
  Register length = scratch1;

  // Compare lengths.
  Label check_zero_length;
  __ movq(length, FieldOperand(left, String::kLengthOffset));
  __ SmiCompare(length, FieldOperand(right, String::kLengthOffset));
  __ j(equal, &check_zero_length, Label::kNear);
  __ Move(rax, Smi::FromInt(NOT_EQUAL));
  __ ret(0);

  // Check if the length is zero.
  Label compare_chars;
  __ bind(&check_zero_length);
  STATIC_ASSERT(kSmiTag == 0);
  __ SmiTest(length);
  __ j(not_zero, &compare_chars, Label::kNear);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  // Compare characters.
  __ bind(&compare_chars);
  Label strings_not_equal;
  GenerateAsciiCharsCompareLoop(masm, left, right, length, scratch2,
                                &strings_not_equal, Label::kNear);

  // Characters are equal.
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  // Characters are not equal.
  __ bind(&strings_not_equal);
  __ Move(rax, Smi::FromInt(NOT_EQUAL));
  __ ret(0);
}

void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
                                                        Register left,
                                                        Register right,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3,
                                                        Register scratch4) {
  // Ensure that you can always subtract a string length from a non-negative
  // number (e.g. another length).
  STATIC_ASSERT(String::kMaxLength < 0x7fffffff);

  // Find minimum length and length difference.
  __ movq(scratch1, FieldOperand(left, String::kLengthOffset));
  __ movq(scratch4, scratch1);
  __ SmiSub(scratch4,
            scratch4,
            FieldOperand(right, String::kLengthOffset));
  // Register scratch4 now holds left.length - right.length.
  const Register length_difference = scratch4;
  Label left_shorter;
  __ j(less, &left_shorter, Label::kNear);
  // The right string isn't longer than the left one.
  // Get the right string's length by subtracting the (non-negative) difference
  // from the left string's length.
  __ SmiSub(scratch1, scratch1, length_difference);
  __ bind(&left_shorter);
  // Register scratch1 now holds Min(left.length, right.length).
  const Register min_length = scratch1;

  Label compare_lengths;
  // If min-length is zero, go directly to comparing lengths.
  __ SmiTest(min_length);
  __ j(zero, &compare_lengths, Label::kNear);

  // Compare loop.
  Label result_not_equal;
  GenerateAsciiCharsCompareLoop(masm, left, right, min_length, scratch2,
                                &result_not_equal, Label::kNear);

  // Completed loop without finding different characters.
  // Compare lengths (precomputed).
  __ bind(&compare_lengths);
  __ SmiTest(length_difference);
  __ j(not_zero, &result_not_equal, Label::kNear);

  // Result is EQUAL.
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  Label result_greater;
  __ bind(&result_not_equal);
  // Unequal comparison of left to right, either character or length.
  __ j(greater, &result_greater, Label::kNear);

  // Result is LESS.
  __ Move(rax, Smi::FromInt(LESS));
  __ ret(0);

  // Result is GREATER.
  __ bind(&result_greater);
  __ Move(rax, Smi::FromInt(GREATER));
  __ ret(0);
}

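// The comparison above reduces to: compare min(left.length, right.length)
// characters, and if they all match, order the strings by length. A C++
// sketch (illustrative only; returns LESS/EQUAL/GREATER as -1/0/1):
static int CompareFlatAsciiSketch(const char* left, int left_length,
                                  const char* right, int right_length) {
  int length_difference = left_length - right_length;
  int min_length = length_difference < 0 ? left_length : right_length;
  for (int i = 0; i < min_length; i++) {
    if (left[i] != right[i]) {
      // For the 7-bit ASCII data this path handles, signed and unsigned
      // byte order agree.
      return left[i] < right[i] ? -1 : 1;
    }
  }
  if (length_difference == 0) return 0;
  return length_difference < 0 ? -1 : 1;  // The shorter string sorts first.
}
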
void StringCompareStub::GenerateAsciiCharsCompareLoop(
    MacroAssembler* masm,
    Register left,
    Register right,
    Register length,
    Register scratch,
    Label* chars_not_equal,
    Label::Distance near_jump) {
  // Change index to run from -length to -1 by adding length to string
  // start. This means that loop ends when index reaches zero, which
  // doesn't need an additional compare.
  __ SmiToInteger32(length, length);
  __ lea(left,
         FieldOperand(left, length, times_1, SeqAsciiString::kHeaderSize));
  __ lea(right,
         FieldOperand(right, length, times_1, SeqAsciiString::kHeaderSize));
  __ neg(length);
  Register index = length;  // index = -length;

  // Compare loop.
  Label loop;
  __ bind(&loop);
  __ movb(scratch, Operand(left, index, times_1, 0));
  __ cmpb(scratch, Operand(right, index, times_1, 0));
  __ j(not_equal, chars_not_equal, near_jump);
  __ addq(index, Immediate(1));
  __ j(not_zero, &loop);
}

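// The loop above avoids a separate end-of-string test by biasing both
// string pointers past the end and running the index from -length up to
// zero. A C++ sketch of the same idea (illustrative only; callers ensure
// length > 0):
static bool AsciiCharsEqualSketch(const char* left, const char* right,
                                  int length) {
  left += length;   // Point just past the last character.
  right += length;
  for (int index = -length; index != 0; index++) {
    if (left[index] != right[index]) return false;  // chars_not_equal.
  }
  return true;  // Reaching index == 0 doubles as the loop-end check.
}
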
void StringCompareStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  // rsp[0]: return address
  // rsp[8]: right string
  // rsp[16]: left string

  __ movq(rdx, Operand(rsp, 2 * kPointerSize));  // left
  __ movq(rax, Operand(rsp, 1 * kPointerSize));  // right

  // Check for identity.
  Label not_same;
  __ cmpq(rdx, rax);
  __ j(not_equal, &not_same, Label::kNear);
  __ Move(rax, Smi::FromInt(EQUAL));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_compare_native(), 1);
  __ ret(2 * kPointerSize);

  __ bind(&not_same);

  // Check that both are sequential ASCII strings.
  __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime);

  // Inline comparison of ascii strings.
  __ IncrementCounter(counters->string_compare_native(), 1);
  // Drop arguments from the stack.
  __ pop(rcx);
  __ addq(rsp, Immediate(2 * kPointerSize));
  __ push(rcx);
  GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8);

  // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
}

void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::SMIS);
  Label miss;
  __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear);

  if (GetCondition() == equal) {
    // For equality we do not care about the sign of the result.
    __ subq(rax, rdx);
  } else {
    Label done;
    __ subq(rdx, rax);
    __ j(no_overflow, &done, Label::kNear);
    // Correct sign of result in case of overflow.
    __ SmiNot(rdx, rdx);
    __ bind(&done);
    __ movq(rax, rdx);
  }
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}

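// For the ordering case above, the stub returns left - right, but a signed
// subtraction can overflow and flip the sign; when that happens, a bitwise
// NOT (SmiNot) restores a result with the correct sign. A C++ sketch over
// 32-bit values (illustrative only; assumes the GCC/Clang overflow
// builtin):
static int32_t SmiCompareSketch(int32_t left, int32_t right) {
  int32_t difference;
  if (!__builtin_sub_overflow(left, right, &difference)) return difference;
  // Overflow means the operands had opposite signs, so the wrapped result
  // has the wrong sign; NOT flips the sign bit back.
  return ~difference;
}
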
void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::HEAP_NUMBERS);

  Label generic_stub;
  Label unordered;
  Label miss;
  Condition either_smi = masm->CheckEitherSmi(rax, rdx);
  __ j(either_smi, &generic_stub, Label::kNear);

  __ CmpObjectType(rax, HEAP_NUMBER_TYPE, rcx);
  __ j(not_equal, &miss, Label::kNear);
  __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
  __ j(not_equal, &miss, Label::kNear);

  // Load left and right operands.
  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));

  // Compare operands.
  __ ucomisd(xmm0, xmm1);

  // Don't base result on EFLAGS when a NaN is involved.
  __ j(parity_even, &unordered, Label::kNear);

  // Return a result of -1, 0, or 1, based on EFLAGS.
  // Performing mov, because xor would destroy the flag register.
  __ movl(rax, Immediate(0));
  __ movl(rcx, Immediate(0));
  __ setcc(above, rax);  // Add one to zero if carry clear and not equal.
  __ sbbq(rax, rcx);  // Subtract one if below (aka. carry set).
  __ ret(0);

  __ bind(&unordered);

  CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
  __ bind(&generic_stub);
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);

  __ bind(&miss);
  GenerateMiss(masm);
}

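// The branch-free sequence above maps the ucomisd flags onto -1/0/1:
// setcc(above) produces (left > right), and sbb then subtracts the carry
// flag, which holds (left < right). A C++ sketch over doubles (illustrative
// only; NaNs are filtered out beforehand by the parity_even check):
static int DoubleCompareSketch(double left, double right) {
  int result = (left > right) ? 1 : 0;  // setcc(above, rax).
  result -= (left < right) ? 1 : 0;     // sbbq(rax, rcx) subtracts the carry.
  return result;                        // 1, 0, or -1.
}
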
void ICCompareStub::GenerateSymbols(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::SYMBOLS);
  ASSERT(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;

  // Check that both operands are heap objects.
  Label miss;
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss, Label::kNear);

  // Check that both operands are symbols.
  __ movq(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ movq(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kSymbolTag != 0);
  __ and_(tmp1, tmp2);
  __ testb(tmp1, Immediate(kIsSymbolMask));
  __ j(zero, &miss, Label::kNear);

  // Symbols are compared by identity.
  Label done;
  __ cmpq(left, right);
  // Make sure rax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  ASSERT(right.is(rax));
  __ j(not_equal, &done, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}

void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::STRINGS);
  ASSERT(GetCondition() == equal);
  Label miss;

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;
  Register tmp3 = rdi;

  // Check that both operands are heap objects.
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss);

  // Check that both operands are strings. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ movq(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ movq(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  __ movq(tmp3, tmp1);
  STATIC_ASSERT(kNotStringTag != 0);
  __ or_(tmp3, tmp2);
  __ testb(tmp3, Immediate(kIsNotStringMask));
  __ j(not_zero, &miss);

  // Fast check for identical strings.
  Label not_same;
  __ cmpq(left, right);
  __ j(not_equal, &not_same, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  // Handle not identical strings.
  __ bind(&not_same);

  // Check that both strings are symbols. If they are, we're done
  // because we already know they are not identical.
  Label do_compare;
  STATIC_ASSERT(kSymbolTag != 0);
  __ and_(tmp1, tmp2);
  __ testb(tmp1, Immediate(kIsSymbolMask));
  __ j(zero, &do_compare, Label::kNear);
  // Make sure rax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  ASSERT(right.is(rax));
  __ ret(0);

  // Check that both strings are sequential ASCII.
  Label runtime;
  __ bind(&do_compare);
  __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime);

  // Compare flat ASCII strings. Returns when done.
  StringCompareStub::GenerateFlatAsciiStringEquals(
      masm, left, right, tmp1, tmp2);

  // Handle more complex cases in runtime.
  __ bind(&runtime);
  __ pop(tmp1);  // Return address.
  __ push(left);
  __ push(right);
  __ push(tmp1);
  __ TailCallRuntime(Runtime::kStringEquals, 2, 1);

  __ bind(&miss);
  GenerateMiss(masm);
}

void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::OBJECTS);
  Label miss;
  Condition either_smi = masm->CheckEitherSmi(rdx, rax);
  __ j(either_smi, &miss, Label::kNear);

  __ CmpObjectType(rax, JS_OBJECT_TYPE, rcx);
  __ j(not_equal, &miss, Label::kNear);
  __ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx);
  __ j(not_equal, &miss, Label::kNear);

  ASSERT(GetCondition() == equal);
  __ subq(rax, rdx);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}

void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
  // Save the registers.
  __ pop(rcx);
  __ push(rdx);
  __ push(rax);
  __ push(rcx);

  // Call the runtime system in a fresh internal frame.
  ExternalReference miss =
      ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
  __ EnterInternalFrame();
  __ push(rdx);
  __ push(rax);
  __ Push(Smi::FromInt(op_));
  __ CallExternalReference(miss, 3);
  __ LeaveInternalFrame();

  // Compute the entry point of the rewritten stub.
  __ lea(rdi, FieldOperand(rax, Code::kHeaderSize));

  // Restore registers.
  __ pop(rcx);
  __ pop(rax);
  __ pop(rdx);
  __ push(rcx);

  // Do a tail call to the rewritten stub.
  __ jmp(rdi);
}

MaybeObject* StringDictionaryLookupStub::GenerateNegativeLookup(
    MacroAssembler* masm,
    Label* miss,
    Label* done,
    Register properties,
    String* name,
    Register r0) {
  // If the names of the slots in the range from 1 to kProbes - 1 for the
  // hash value are not equal to the name and the kProbes-th slot is not
  // used (its name is the undefined value), it guarantees the hash table
  // doesn't contain the property. This holds even if some slots represent
  // deleted properties (their names are the null value).
  for (int i = 0; i < kInlinedProbes; i++) {
    // r0 points to properties hash.
    // Compute the masked index: (hash + i + i * i) & mask.
    Register index = r0;
    // Capacity is smi 2^n.
    __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset));
    __ decl(index);
    __ and_(index,
            Immediate(name->Hash() + StringDictionary::GetProbeOffset(i)));

    // Scale the index by multiplying by the entry size.
    ASSERT(StringDictionary::kEntrySize == 3);
    __ lea(index, Operand(index, index, times_2, 0));  // index *= 3.

    Register entity_name = r0;
    // Having undefined at this place means the name is not contained.
    ASSERT_EQ(kSmiTagSize, 1);
    __ movq(entity_name, Operand(properties,
                                 index,
                                 times_pointer_size,
                                 kElementsStartOffset - kHeapObjectTag));
    __ Cmp(entity_name, masm->isolate()->factory()->undefined_value());
    __ j(equal, done);

    // Stop if found the property.
    __ Cmp(entity_name, Handle<String>(name));
    __ j(equal, miss);

    // Check if the entry name is not a symbol.
    __ movq(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
    __ testb(FieldOperand(entity_name, Map::kInstanceTypeOffset),
             Immediate(kIsSymbolMask));
    __ j(zero, miss);
  }

  StringDictionaryLookupStub stub(properties,
                                  r0,
                                  r0,
                                  StringDictionaryLookupStub::NEGATIVE_LOOKUP);
  __ Push(Handle<Object>(name));
  __ push(Immediate(name->Hash()));
  MaybeObject* result = masm->TryCallStub(&stub);
  if (result->IsFailure()) return result;
  __ testq(r0, r0);
  __ j(not_zero, miss);
  __ jmp(done);
  return result;
}

// Probe the string dictionary in the |elements| register. Jump to the
// |done| label if a property with the given name is found, leaving the
// index into the dictionary in |r1|. Jump to the |miss| label
// otherwise.
void StringDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
                                                        Label* miss,
                                                        Label* done,
                                                        Register elements,
                                                        Register name,
                                                        Register r0,
                                                        Register r1) {
  // Assert that name contains a string.
  if (FLAG_debug_code) __ AbortIfNotString(name);

  __ SmiToInteger32(r0, FieldOperand(elements, kCapacityOffset));
  __ decl(r0);

  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ movl(r1, FieldOperand(name, String::kHashFieldOffset));
    __ shrl(r1, Immediate(String::kHashShift));
    if (i > 0) {
      __ addl(r1, Immediate(StringDictionary::GetProbeOffset(i)));
    }
    __ and_(r1, r0);

    // Scale the index by multiplying by the entry size.
    ASSERT(StringDictionary::kEntrySize == 3);
    __ lea(r1, Operand(r1, r1, times_2, 0));  // r1 = r1 * 3

    // Check if the key is identical to the name.
    __ cmpq(name, Operand(elements, r1, times_pointer_size,
                          kElementsStartOffset - kHeapObjectTag));
    __ j(equal, done);
  }

  StringDictionaryLookupStub stub(elements,
                                  r0,
                                  r1,
                                  POSITIVE_LOOKUP);
  __ push(name);
  __ movl(r0, FieldOperand(name, String::kHashFieldOffset));
  __ shrl(r0, Immediate(String::kHashShift));
  __ push(r0);
  __ CallStub(&stub);

  __ testq(r0, r0);
  __ j(zero, miss);
  __ jmp(done);
}

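// Both lookup helpers above, and the out-of-line probes below, share one
// probe schedule: entry i lives at (hash + GetProbeOffset(i)) & mask, with
// the capacity a power of two and each entry spanning kEntrySize slots. A
// C++ sketch of one probe step (illustrative only; it assumes
// GetProbeOffset(0) == 0, which is why the generated code skips the add on
// the first probe):
static int ProbeSlotSketch(uint32_t hash, int probe, int capacity) {
  uint32_t mask = capacity - 1;  // Capacity is 2^n.
  uint32_t index = (hash + StringDictionary::GetProbeOffset(probe)) & mask;
  // Scale by the entry size, as the lea instructions above do (index *= 3).
  return static_cast<int>(index * StringDictionary::kEntrySize);
}
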
void StringDictionaryLookupStub::Generate(MacroAssembler* masm) {
  // Stack frame on entry:
  //  rsp[0 * kPointerSize]: return address.
  //  rsp[1 * kPointerSize]: key's hash.
  //  rsp[2 * kPointerSize]: key.
  // Registers:
  //  dictionary_: StringDictionary to probe.
  //  result_: used as scratch.
  //  index_: will hold an index of entry if lookup is successful.
  //          might alias with result_.
  // Returns:
  //  result_ is zero if lookup failed, non-zero otherwise.

  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;

  Register scratch = result_;

  __ SmiToInteger32(scratch, FieldOperand(dictionary_, kCapacityOffset));
  __ decl(scratch);
  __ push(scratch);

  // If the names of the slots in the range from 1 to kProbes - 1 for the
  // hash value are not equal to the name and the kProbes-th slot is not
  // used (its name is the undefined value), it guarantees the hash table
  // doesn't contain the property. This holds even if some slots represent
  // deleted properties (their names are the null value).
  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ movq(scratch, Operand(rsp, 2 * kPointerSize));
    if (i > 0) {
      __ addl(scratch, Immediate(StringDictionary::GetProbeOffset(i)));
    }
    __ and_(scratch, Operand(rsp, 0));

    // Scale the index by multiplying by the entry size.
    ASSERT(StringDictionary::kEntrySize == 3);
    __ lea(index_, Operand(scratch, scratch, times_2, 0));  // index *= 3.

    // Having undefined at this place means the name is not contained.
    __ movq(scratch, Operand(dictionary_,
                             index_,
                             times_pointer_size,
                             kElementsStartOffset - kHeapObjectTag));

    __ Cmp(scratch, masm->isolate()->factory()->undefined_value());
    __ j(equal, &not_in_dictionary);

    // Stop if found the property.
    __ cmpq(scratch, Operand(rsp, 3 * kPointerSize));
    __ j(equal, &in_dictionary);

    if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) {
      // If we hit a non symbol key during negative lookup
      // we have to bail out, as this key might be equal to the
      // key we are looking for.

      // Check if the entry name is not a symbol.
      __ movq(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
      __ testb(FieldOperand(scratch, Map::kInstanceTypeOffset),
               Immediate(kIsSymbolMask));
      __ j(zero, &maybe_in_dictionary);
    }
  }

  __ bind(&maybe_in_dictionary);
  // If we are doing negative lookup then probing failure should be
  // treated as a lookup success. For positive lookup probing failure
  // should be treated as lookup failure.
  if (mode_ == POSITIVE_LOOKUP) {
    __ movq(scratch, Immediate(0));
    __ Drop(1);
    __ ret(2 * kPointerSize);
  }

  __ bind(&in_dictionary);
  __ movq(scratch, Immediate(1));
  __ Drop(1);
  __ ret(2 * kPointerSize);

  __ bind(&not_in_dictionary);
  __ movq(scratch, Immediate(0));
  __ Drop(1);
  __ ret(2 * kPointerSize);
}

#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64