// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "bootstrapper.h"
#include "code-stubs.h"
#include "regexp-macro-assembler.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void ToNumberStub::Generate(MacroAssembler* masm) {
  // The ToNumber stub takes one argument in rax.
  NearLabel check_heap_number, call_builtin;
  __ SmiTest(rax);
  __ j(not_zero, &check_heap_number);
  __ Ret();

  __ bind(&check_heap_number);
  __ Move(rbx, Factory::heap_number_map());
  __ cmpq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
  __ j(not_equal, &call_builtin);
  __ Ret();

  __ bind(&call_builtin);
  __ pop(rcx);  // Pop return address.
  __ push(rax);
  __ push(rcx);  // Push return address.
  __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION);
}
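
// A sketch of the smi check above: on x64, a smi stores its 32-bit value
// in the upper half of the word and keeps the low-order tag bit clear
// (see the STATIC_ASSERT(kSmiValueSize == 32) further down), so SmiTest
// only has to test the tag bit. Illustrative helpers, not part of this
// file:
//
//   bool IsSmi(intptr_t word) { return (word & 1) == 0; }
//   int32_t SmiValue(intptr_t word) {
//     return static_cast<int32_t>(word >> 32);
//   }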


void FastNewClosureStub::Generate(MacroAssembler* masm) {
  // Create a new closure from the given function info in new
  // space. Set the context to the current context in rsi.
  Label gc;
  __ AllocateInNewSpace(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT);

  // Get the function info from the stack.
  __ movq(rdx, Operand(rsp, 1 * kPointerSize));

  // Compute the function map in the current global context and set that
  // as the map of the allocated object.
  __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset));
  __ movq(rcx, Operand(rcx, Context::SlotOffset(Context::FUNCTION_MAP_INDEX)));
  __ movq(FieldOperand(rax, JSObject::kMapOffset), rcx);

  // Initialize the rest of the function. We don't have to update the
  // write barrier because the allocated object is in new space.
  __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
  __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex);
  __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
  __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
  __ movq(FieldOperand(rax, JSObject::kElementsOffset), rbx);
  __ movq(FieldOperand(rax, JSFunction::kPrototypeOrInitialMapOffset), rcx);
  __ movq(FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset), rdx);
  __ movq(FieldOperand(rax, JSFunction::kContextOffset), rsi);
  __ movq(FieldOperand(rax, JSFunction::kLiteralsOffset), rbx);
  __ movq(FieldOperand(rax, JSFunction::kNextFunctionLinkOffset), rdi);

  // Initialize the code pointer in the function to be the one
  // found in the shared function info object.
  __ movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
  __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
  __ movq(FieldOperand(rax, JSFunction::kCodeEntryOffset), rdx);

  // Return and remove the on-stack parameter.
  __ ret(1 * kPointerSize);

  // Create a new closure through the slower runtime call.
  __ bind(&gc);
  __ pop(rcx);  // Temporarily remove return address.
  __ pop(rdx);
  __ push(rsi);
  __ push(rdx);
  __ Push(Factory::false_value());
  __ push(rcx);  // Restore return address.
  __ TailCallRuntime(Runtime::kNewClosure, 3, 1);
}


void FastNewContextStub::Generate(MacroAssembler* masm) {
  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
  __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize,
                        rax, rbx, rcx, &gc, TAG_OBJECT);

  // Get the function from the stack.
  __ movq(rcx, Operand(rsp, 1 * kPointerSize));

  // Set up the object header.
  __ LoadRoot(kScratchRegister, Heap::kContextMapRootIndex);
  __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
  __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));

  // Set up the fixed slots.
  __ Set(rbx, 0);  // Set to NULL.
  __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx);
  __ movq(Operand(rax, Context::SlotOffset(Context::FCONTEXT_INDEX)), rax);
  __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rbx);
  __ movq(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx);

  // Copy the global object from the surrounding context.
  __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_INDEX)), rbx);

  // Initialize the rest of the slots to undefined.
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
    __ movq(Operand(rax, Context::SlotOffset(i)), rbx);
  }

  // Return and remove the on-stack parameter.
  __ movq(rsi, rax);
  __ ret(1 * kPointerSize);

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kNewContext, 1, 1);
}
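
// A worked example of the allocation size above, assuming the usual x64
// values of kPointerSize == 8 and FixedArray::kHeaderSize == 16, and
// (hypothetically) Context::MIN_CONTEXT_SLOTS == 5:
//
//   slots_ == 3  =>  length == 8
//   size == 8 * 8 + 16 == 80 bytes, allocated and tagged in new space.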


void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [rsp + kPointerSize]: constant elements.
  // [rsp + (2 * kPointerSize)]: literal index.
  // [rsp + (3 * kPointerSize)]: literals array.

  // All sizes here are multiples of kPointerSize.
  int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
  int size = JSArray::kSize + elements_size;

  // Load boilerplate object into rcx and check if we need to create a
  // boilerplate.
  Label slow_case;
  __ movq(rcx, Operand(rsp, 3 * kPointerSize));
  __ movq(rax, Operand(rsp, 2 * kPointerSize));
  SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
  __ movq(rcx,
          FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize));
  __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
  __ j(equal, &slow_case);

  if (FLAG_debug_code) {
    const char* message;
    Heap::RootListIndex expected_map_index;
    if (mode_ == CLONE_ELEMENTS) {
      message = "Expected (writable) fixed array";
      expected_map_index = Heap::kFixedArrayMapRootIndex;
    } else {
      ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS);
      message = "Expected copy-on-write fixed array";
      expected_map_index = Heap::kFixedCOWArrayMapRootIndex;
    }
    __ push(rcx);
    __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
    __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
                   expected_map_index);
    __ Assert(equal, message);
    __ pop(rcx);
  }

  // Allocate both the JS array and the elements array in one big
  // allocation. This avoids multiple limit checks.
  __ AllocateInNewSpace(size, rax, rbx, rdx, &slow_case, TAG_OBJECT);

  // Copy the JS array part.
  for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
    if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
      __ movq(rbx, FieldOperand(rcx, i));
      __ movq(FieldOperand(rax, i), rbx);
    }
  }

  if (length_ > 0) {
    // Get hold of the elements array of the boilerplate and set up the
    // elements pointer in the resulting object.
    __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
    __ lea(rdx, Operand(rax, JSArray::kSize));
    __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx);

    // Copy the elements array.
    for (int i = 0; i < elements_size; i += kPointerSize) {
      __ movq(rbx, FieldOperand(rcx, i));
      __ movq(FieldOperand(rdx, i), rbx);
    }
  }

  // Return and remove the on-stack parameters.
  __ ret(3 * kPointerSize);

  __ bind(&slow_case);
  __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
}
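
// For context: this stub backs JS array literals. Each evaluation of a
// literal such as
//
//   function f() { return [1, 2, 3]; }
//
// clones a per-literal boilerplate array, so f() yields a fresh array on
// every call. "Shallow" means the element words are copied as-is rather
// than recursively cloned.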


void ToBooleanStub::Generate(MacroAssembler* masm) {
  NearLabel false_result, true_result, not_string;
  __ movq(rax, Operand(rsp, 1 * kPointerSize));

  // 'null' => false.
  __ CompareRoot(rax, Heap::kNullValueRootIndex);
  __ j(equal, &false_result);

  // Get the map and type of the heap object.
  // We don't use CmpObjectType because we manipulate the type field.
  __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
  __ movzxbq(rcx, FieldOperand(rdx, Map::kInstanceTypeOffset));

  // Undetectable => false.
  __ movzxbq(rbx, FieldOperand(rdx, Map::kBitFieldOffset));
  __ and_(rbx, Immediate(1 << Map::kIsUndetectable));
  __ j(not_zero, &false_result);

  // JavaScript object => true.
  __ cmpq(rcx, Immediate(FIRST_JS_OBJECT_TYPE));
  __ j(above_equal, &true_result);

  // String value => false iff empty.
  __ cmpq(rcx, Immediate(FIRST_NONSTRING_TYPE));
  __ j(above_equal, &not_string);
  __ movq(rdx, FieldOperand(rax, String::kLengthOffset));
  __ SmiTest(rdx);
  __ j(zero, &false_result);
  __ jmp(&true_result);

  __ bind(&not_string);
  __ CompareRoot(rdx, Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &true_result);
  // HeapNumber => false iff +0, -0, or NaN.
  // These three cases set the zero flag when compared to zero using ucomisd.
  __ xorpd(xmm0, xmm0);
  __ ucomisd(xmm0, FieldOperand(rax, HeapNumber::kValueOffset));
  __ j(zero, &false_result);
  // Fall through to |true_result|.

  // Return 1/0 for true/false in rax.
  __ bind(&true_result);
  __ movq(rax, Immediate(1));
  __ ret(1 * kPointerSize);
  __ bind(&false_result);
  __ Set(rax, 0);
  __ ret(1 * kPointerSize);
}
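
// The branches above mirror ECMAScript ToBoolean. In JS terms:
//
//   Boolean(null)   // false
//   Boolean("")     // false (empty string)
//   Boolean("x")    // true
//   Boolean(0)      // false (likewise -0 and NaN)
//   Boolean(2.5)    // true
//   Boolean({})     // true (ordinary objects)
//
// "Undetectable" objects (e.g. document.all in browsers) are the odd case
// that also converts to false.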


const char* GenericBinaryOpStub::GetName() {
  if (name_ != NULL) return name_;
  const int kMaxNameLength = 100;
  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
  if (name_ == NULL) return "OOM";
  const char* op_name = Token::Name(op_);
  const char* overwrite_name;
  switch (mode_) {
    case NO_OVERWRITE: overwrite_name = "Alloc"; break;
    case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
    case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
    default: overwrite_name = "UnknownOverwrite"; break;
  }

  OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
               "GenericBinaryOpStub_%s_%s%s_%s%s_%s_%s",
               op_name,
               overwrite_name,
               (flags_ & NO_SMI_CODE_IN_STUB) ? "_NoSmiInStub" : "",
               args_in_registers_ ? "RegArgs" : "StackArgs",
               args_reversed_ ? "_R" : "",
               static_operands_type_.ToString(),
               BinaryOpIC::GetName(runtime_operands_type_));
  return name_;
}
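
// A sketch of a generated name, assuming Token::Name(Token::ADD) yields
// "ADD" and smi operands arriving in registers:
//
//   GenericBinaryOpStub_ADD_Alloc_RegArgs_Smi_...
//
// The trailing pieces come from the operand TypeInfo's ToString and from
// BinaryOpIC::GetName, so treat the exact string as illustrative only.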


void GenericBinaryOpStub::GenerateCall(
    MacroAssembler* masm,
    Register left,
    Register right) {
  if (!ArgsInRegistersSupported()) {
    // Pass arguments on the stack.
    __ push(left);
    __ push(right);
  } else {
    // The calling convention with registers is left in rdx and right in rax.
    Register left_arg = rdx;
    Register right_arg = rax;
    if (!(left.is(left_arg) && right.is(right_arg))) {
      if (left.is(right_arg) && right.is(left_arg)) {
        if (IsOperationCommutative()) {
          SetArgsReversed();
        } else {
          __ xchg(left, right);
        }
      } else if (left.is(left_arg)) {
        __ movq(right_arg, right);
      } else if (right.is(right_arg)) {
        __ movq(left_arg, left);
      } else if (left.is(right_arg)) {
        if (IsOperationCommutative()) {
          __ movq(left_arg, right);
          SetArgsReversed();
        } else {
          // Order of moves important to avoid destroying left argument.
          __ movq(left_arg, left);
          __ movq(right_arg, right);
        }
      } else if (right.is(left_arg)) {
        if (IsOperationCommutative()) {
          __ movq(right_arg, left);
          SetArgsReversed();
        } else {
          // Order of moves important to avoid destroying right argument.
          __ movq(right_arg, right);
          __ movq(left_arg, left);
        }
      } else {
        // Order of moves is not important.
        __ movq(left_arg, left);
        __ movq(right_arg, right);
      }
    }

    // Update flags to indicate that arguments are in registers.
    SetArgsInRegisters();
    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
  }

  // Call the stub.
  __ CallStub(this);
}
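
// The case analysis above is plain register shuffling into the (rdx, rax)
// convention without clobbering either input. For example, when left and
// right arrive exactly swapped (left == rax, right == rdx), a commutative
// operation just records SetArgsReversed() and emits no moves at all,
// while a non-commutative one needs a single xchg; every other disjoint
// assignment takes at most two movq instructions, ordered so that no
// pending source register is overwritten first.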


void GenericBinaryOpStub::GenerateCall(
    MacroAssembler* masm,
    Register left,
    Smi* right) {
  if (!ArgsInRegistersSupported()) {
    // Pass arguments on the stack.
    __ push(left);
    __ Push(right);
  } else {
    // The calling convention with registers is left in rdx and right in rax.
    Register left_arg = rdx;
    Register right_arg = rax;
    if (left.is(left_arg)) {
      __ Move(right_arg, right);
    } else if (left.is(right_arg) && IsOperationCommutative()) {
      __ Move(left_arg, right);
      SetArgsReversed();
    } else {
      // For non-commutative operations, left and right_arg might be
      // the same register. Therefore, the order of the moves is
      // important here in order to not overwrite left before moving
      // it to left_arg.
      __ movq(left_arg, left);
      __ Move(right_arg, right);
    }

    // Update flags to indicate that arguments are in registers.
    SetArgsInRegisters();
    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
  }

  // Call the stub.
  __ CallStub(this);
}


void GenericBinaryOpStub::GenerateCall(
    MacroAssembler* masm,
    Smi* left,
    Register right) {
  if (!ArgsInRegistersSupported()) {
    // Pass arguments on the stack.
    __ Push(left);
    __ push(right);
  } else {
    // The calling convention with registers is left in rdx and right in rax.
    Register left_arg = rdx;
    Register right_arg = rax;
    if (right.is(right_arg)) {
      __ Move(left_arg, left);
    } else if (right.is(left_arg) && IsOperationCommutative()) {
      __ Move(right_arg, left);
      SetArgsReversed();
    } else {
      // For non-commutative operations, right and left_arg might be
      // the same register. Therefore, the order of the moves is
      // important here in order to not overwrite right before moving
      // it to right_arg.
      __ movq(right_arg, right);
      __ Move(left_arg, left);
    }
    // Update flags to indicate that arguments are in registers.
    SetArgsInRegisters();
    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
  }

  // Call the stub.
  __ CallStub(this);
}


class FloatingPointHelper : public AllStatic {
 public:
  // Load the operands from rdx and rax into xmm0 and xmm1, as doubles.
  // If the operands are not both numbers, jump to not_numbers.
  // Leaves rdx and rax unchanged. SmiOperands assumes both are smis.
  // NumberOperands assumes both are smis or heap numbers.
  static void LoadSSE2SmiOperands(MacroAssembler* masm);
  static void LoadSSE2NumberOperands(MacroAssembler* masm);
  static void LoadSSE2UnknownOperands(MacroAssembler* masm,
                                      Label* not_numbers);

  // Takes the operands in rdx and rax and loads them as integers in rax
  // and rcx.
  static void LoadAsIntegers(MacroAssembler* masm,
                             Label* operand_conversion_failure,
                             Register heap_number_map);
  // As above, but we know the operands to be numbers. In that case,
  // conversion can't fail.
  static void LoadNumbersAsIntegers(MacroAssembler* masm);
};


void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) {
  // 1. Move arguments into rdx, rax except for DIV and MOD, which need the
  // dividend in rax and rdx free for the division. Use rax, rbx for those.
  Comment load_comment(masm, "-- Load arguments");
  Register left = rdx;
  Register right = rax;
  if (op_ == Token::DIV || op_ == Token::MOD) {
    left = rax;
    right = rbx;
    if (HasArgsInRegisters()) {
      __ movq(rbx, rax);
      __ movq(rax, rdx);
    }
  }
  if (!HasArgsInRegisters()) {
    __ movq(right, Operand(rsp, 1 * kPointerSize));
    __ movq(left, Operand(rsp, 2 * kPointerSize));
  }

  Label not_smis;
  // 2. Smi check both operands.
  if (static_operands_type_.IsSmi()) {
    // Skip smi check if we know that both arguments are smis.
    if (FLAG_debug_code) {
      __ AbortIfNotSmi(left);
      __ AbortIfNotSmi(right);
    }
    if (op_ == Token::BIT_OR) {
      // Handle OR here, since we do extra smi-checking in the or code below.
      __ SmiOr(right, right, left);
      GenerateReturn(masm);
      return;
    }
  } else {
    if (op_ != Token::BIT_OR) {
      // Skip the check for OR as it is better combined with the
      // actual operation.
      Comment smi_check_comment(masm, "-- Smi check arguments");
      __ JumpIfNotBothSmi(left, right, &not_smis);
    }
  }

  // 3. Operands are both smis (except for OR), perform the operation leaving
  // the result in rax and check the result if necessary.
  Comment perform_smi(masm, "-- Perform smi operation");
  Label use_fp_on_smis;
  switch (op_) {
    case Token::ADD: {
      ASSERT(right.is(rax));
      __ SmiAdd(right, right, left, &use_fp_on_smis);  // ADD is commutative.
      break;
    }

    case Token::SUB: {
      __ SmiSub(left, left, right, &use_fp_on_smis);
      __ movq(rax, left);
      break;
    }

    case Token::MUL:
      ASSERT(right.is(rax));
      __ SmiMul(right, right, left, &use_fp_on_smis);  // MUL is commutative.
      break;

    case Token::DIV:
      ASSERT(left.is(rax));
      __ SmiDiv(left, left, right, &use_fp_on_smis);
      break;

    case Token::MOD:
      ASSERT(left.is(rax));
      __ SmiMod(left, left, right, slow);
      break;

    case Token::BIT_OR:
      ASSERT(right.is(rax));
      __ movq(rcx, right);  // Save the right operand.
      __ SmiOr(right, right, left);  // BIT_OR is commutative.
      __ testb(right, Immediate(kSmiTagMask));
      __ j(not_zero, &not_smis);
      break;

    case Token::BIT_AND:
      ASSERT(right.is(rax));
      __ SmiAnd(right, right, left);  // BIT_AND is commutative.
      break;

    case Token::BIT_XOR:
      ASSERT(right.is(rax));
      __ SmiXor(right, right, left);  // BIT_XOR is commutative.
      break;

    case Token::SHL:
    case Token::SHR:
    case Token::SAR:
      switch (op_) {
        case Token::SAR:
          __ SmiShiftArithmeticRight(left, left, right);
          break;
        case Token::SHR:
          __ SmiShiftLogicalRight(left, left, right, slow);
          break;
        case Token::SHL:
          __ SmiShiftLeft(left, left, right);
          break;
        default:
          UNREACHABLE();
      }
      __ movq(rax, left);
      break;

    default:
      UNREACHABLE();
      break;
  }

  // 4. Emit return of result in rax.
  GenerateReturn(masm);

  // 5. For some operations emit inline code to perform floating point
  // operations on known smis (e.g., if the result of the operation
  // overflowed the smi range).
  switch (op_) {
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV: {
      ASSERT(use_fp_on_smis.is_linked());
      __ bind(&use_fp_on_smis);
      if (op_ == Token::DIV) {
        __ movq(rdx, rax);
        __ movq(rax, rbx);
      }
      // left is rdx, right is rax.
      __ AllocateHeapNumber(rbx, rcx, slow);
      FloatingPointHelper::LoadSSE2SmiOperands(masm);
      switch (op_) {
        case Token::ADD: __ addsd(xmm0, xmm1); break;
        case Token::SUB: __ subsd(xmm0, xmm1); break;
        case Token::MUL: __ mulsd(xmm0, xmm1); break;
        case Token::DIV: __ divsd(xmm0, xmm1); break;
        default: UNREACHABLE();
      }
      __ movsd(FieldOperand(rbx, HeapNumber::kValueOffset), xmm0);
      __ movq(rax, rbx);
      GenerateReturn(masm);
    }
    default:
      break;
  }

  // 6. Non-smi operands, fall out to the non-smi code with the operands in
  // rdx and rax.
  Comment done_comment(masm, "-- Enter non-smi code");
  __ bind(&not_smis);

  switch (op_) {
    case Token::DIV:
    case Token::MOD:
      // Operands are in rax, rbx at this point.
      __ movq(rdx, rax);
      __ movq(rax, rbx);
      break;

    case Token::BIT_OR:
      // Right operand is saved in rcx and rax was destroyed by the smi
      // operation.
      __ movq(rax, rcx);
      break;

    default:
      break;
  }
}
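
// The use_fp_on_smis path covers results that fit a double but not the
// 32-bit smi payload. In JS terms:
//
//   2147483647 + 1   // both operands are smis, the sum (2^31) is not,
//                    // so the stub allocates a HeapNumber result.
//   7 / 2            // 3.5: smi inputs, fractional (non-smi) quotient.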


void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
  Label call_runtime;

  if (ShouldGenerateSmiCode()) {
    GenerateSmiCode(masm, &call_runtime);
  } else if (op_ != Token::MOD) {
    if (!HasArgsInRegisters()) {
      GenerateLoadArguments(masm);
    }
  }
  // Floating point case.
  if (ShouldGenerateFPCode()) {
    switch (op_) {
      case Token::ADD:
      case Token::SUB:
      case Token::MUL:
      case Token::DIV: {
        if (runtime_operands_type_ == BinaryOpIC::DEFAULT &&
            HasSmiCodeInStub()) {
          // Execution reaches this point when the first non-smi argument
          // occurs (and only if smi code is generated). This is the right
          // moment to patch to HEAP_NUMBERS state. The transition is
          // attempted only for the four basic operations. The stub stays in
          // the DEFAULT state forever for all other operations (also if smi
          // code is skipped).
          GenerateTypeTransition(masm);
          break;
        }

        Label not_floats;
        // rax: y
        // rdx: x
        if (static_operands_type_.IsNumber()) {
          if (FLAG_debug_code) {
            // Assert at runtime that inputs are only numbers.
            __ AbortIfNotNumber(rdx);
            __ AbortIfNotNumber(rax);
          }
          FloatingPointHelper::LoadSSE2NumberOperands(masm);
        } else {
          FloatingPointHelper::LoadSSE2UnknownOperands(masm, &call_runtime);
        }

        switch (op_) {
          case Token::ADD: __ addsd(xmm0, xmm1); break;
          case Token::SUB: __ subsd(xmm0, xmm1); break;
          case Token::MUL: __ mulsd(xmm0, xmm1); break;
          case Token::DIV: __ divsd(xmm0, xmm1); break;
          default: UNREACHABLE();
        }
        // Allocate a heap number, if needed.
        Label skip_allocation;
        OverwriteMode mode = mode_;
        if (HasArgsReversed()) {
          if (mode == OVERWRITE_RIGHT) {
            mode = OVERWRITE_LEFT;
          } else if (mode == OVERWRITE_LEFT) {
            mode = OVERWRITE_RIGHT;
          }
        }
        switch (mode) {
          case OVERWRITE_LEFT:
            __ JumpIfNotSmi(rdx, &skip_allocation);
            __ AllocateHeapNumber(rbx, rcx, &call_runtime);
            __ movq(rdx, rbx);
            __ bind(&skip_allocation);
            __ movq(rax, rdx);
            break;
          case OVERWRITE_RIGHT:
            // If the argument in rax is already an object, we skip the
            // allocation of a heap number.
            __ JumpIfNotSmi(rax, &skip_allocation);
            // Fall through!
          case NO_OVERWRITE:
            // Allocate a heap number for the result. Keep rax and rdx intact
            // for the possible runtime call.
            __ AllocateHeapNumber(rbx, rcx, &call_runtime);
            __ movq(rax, rbx);
            __ bind(&skip_allocation);
            break;
          default: UNREACHABLE();
        }
        __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0);
        GenerateReturn(masm);
        __ bind(&not_floats);
        if (runtime_operands_type_ == BinaryOpIC::DEFAULT &&
            !HasSmiCodeInStub()) {
          // Execution reaches this point when the first non-number argument
          // occurs (and only if smi code is skipped from the stub, otherwise
          // the patching has already been done earlier in this case branch).
          // A perfect moment to try patching to STRINGS for ADD operation.
          if (op_ == Token::ADD) {
            GenerateTypeTransition(masm);
          }
        }
        break;
      }
      case Token::MOD: {
        // For MOD we go directly to runtime in the non-smi case.
        break;
      }
      case Token::BIT_OR:
      case Token::BIT_AND:
      case Token::BIT_XOR:
      case Token::SAR:
      case Token::SHL:
      case Token::SHR: {
        Label skip_allocation, non_smi_shr_result;
        Register heap_number_map = r9;
        __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
        if (static_operands_type_.IsNumber()) {
          if (FLAG_debug_code) {
            // Assert at runtime that inputs are only numbers.
            __ AbortIfNotNumber(rdx);
            __ AbortIfNotNumber(rax);
          }
          FloatingPointHelper::LoadNumbersAsIntegers(masm);
        } else {
          FloatingPointHelper::LoadAsIntegers(masm,
                                              &call_runtime,
                                              heap_number_map);
        }
        switch (op_) {
          case Token::BIT_OR: __ orl(rax, rcx); break;
          case Token::BIT_AND: __ andl(rax, rcx); break;
          case Token::BIT_XOR: __ xorl(rax, rcx); break;
          case Token::SAR: __ sarl_cl(rax); break;
          case Token::SHL: __ shll_cl(rax); break;
          case Token::SHR: {
            __ shrl_cl(rax);
            // Check if result is negative. This can only happen for a shift
            // by zero.
            __ testl(rax, rax);
            __ j(negative, &non_smi_shr_result);
            break;
          }
          default: UNREACHABLE();
        }

        STATIC_ASSERT(kSmiValueSize == 32);
        // Tag smi result and return.
        __ Integer32ToSmi(rax, rax);
        GenerateReturn(masm);

        // All bit-ops except SHR return a signed int32 that can be
        // returned immediately as a smi.
        // We might need to allocate a HeapNumber if we shift a negative
        // number right by zero (i.e., convert to UInt32).
        if (op_ == Token::SHR) {
          ASSERT(non_smi_shr_result.is_linked());
          __ bind(&non_smi_shr_result);
          // Allocate a heap number if needed.
          __ movl(rbx, rax);  // rbx holds result value (uint32 value as int64).
          switch (mode_) {
            case OVERWRITE_LEFT:
            case OVERWRITE_RIGHT:
              // If the operand was an object, we skip the
              // allocation of a heap number.
              __ movq(rax, Operand(rsp, mode_ == OVERWRITE_RIGHT ?
                                   1 * kPointerSize : 2 * kPointerSize));
              __ JumpIfNotSmi(rax, &skip_allocation);
              // Fall through!
            case NO_OVERWRITE:
              // Allocate heap number in new space.
              // Not using AllocateHeapNumber macro in order to reuse
              // already loaded heap_number_map.
              __ AllocateInNewSpace(HeapNumber::kSize,
                                    rax,
                                    rcx,
                                    no_reg,
                                    &call_runtime,
                                    TAG_OBJECT);
              // Set the map.
              if (FLAG_debug_code) {
                __ AbortIfNotRootValue(heap_number_map,
                                       Heap::kHeapNumberMapRootIndex,
                                       "HeapNumberMap register clobbered.");
              }
              __ movq(FieldOperand(rax, HeapObject::kMapOffset),
                      heap_number_map);
              __ bind(&skip_allocation);
              break;
            default: UNREACHABLE();
          }
          // Store the result in the HeapNumber and return.
          __ cvtqsi2sd(xmm0, rbx);
          __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0);
          GenerateReturn(masm);
        }

        break;
      }
      default: UNREACHABLE(); break;
    }
  }

  // If all else fails, use the runtime system to get the correct
  // result. If arguments were passed in registers, now place them on the
  // stack in the correct order below the return address.
  __ bind(&call_runtime);

  if (HasArgsInRegisters()) {
    GenerateRegisterArgsPush(masm);
  }

  switch (op_) {
    case Token::ADD: {
      // Registers containing left and right operands respectively.
      Register lhs, rhs;

      if (HasArgsReversed()) {
        lhs = rax;
        rhs = rdx;
      } else {
        lhs = rdx;
        rhs = rax;
      }

      // Test for string arguments before calling runtime.
      Label not_strings, both_strings, not_string1, string1, string1_smi2;

      // If this stub has already generated FP-specific code then the
      // arguments are already in rdx and rax.
      if (!ShouldGenerateFPCode() && !HasArgsInRegisters()) {
        GenerateLoadArguments(masm);
      }

      Condition is_smi;
      is_smi = masm->CheckSmi(lhs);
      __ j(is_smi, &not_string1);
      __ CmpObjectType(lhs, FIRST_NONSTRING_TYPE, r8);
      __ j(above_equal, &not_string1);

      // First argument is a string, test second.
      is_smi = masm->CheckSmi(rhs);
      __ j(is_smi, &string1_smi2);
      __ CmpObjectType(rhs, FIRST_NONSTRING_TYPE, r9);
      __ j(above_equal, &string1);

      // First and second argument are strings.
      StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
      __ TailCallStub(&string_add_stub);

      __ bind(&string1_smi2);
      // First argument is a string, second is a smi. Try to look up the
      // number string for the smi in the number string cache.
      NumberToStringStub::GenerateLookupNumberStringCache(
          masm, rhs, rbx, rcx, r8, true, &string1);

      // Replace second argument on stack and tailcall string add stub to make
      // the result.
      __ movq(Operand(rsp, 1 * kPointerSize), rbx);
      __ TailCallStub(&string_add_stub);

      // Only first argument is a string.
      __ bind(&string1);
      __ InvokeBuiltin(Builtins::STRING_ADD_LEFT, JUMP_FUNCTION);

      // First argument was not a string, test second.
      __ bind(&not_string1);
      is_smi = masm->CheckSmi(rhs);
      __ j(is_smi, &not_strings);
      __ CmpObjectType(rhs, FIRST_NONSTRING_TYPE, rhs);
      __ j(above_equal, &not_strings);

      // Only second argument is a string.
      __ InvokeBuiltin(Builtins::STRING_ADD_RIGHT, JUMP_FUNCTION);

      __ bind(&not_strings);
      // Neither argument is a string.
      __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
      break;
    }
    case Token::SUB:
      __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
      break;
    case Token::MUL:
      __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
      break;
    case Token::DIV:
      __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
      break;
    case Token::MOD:
      __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
      break;
    case Token::BIT_OR:
      __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
      break;
    case Token::BIT_AND:
      __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
      break;
    case Token::BIT_XOR:
      __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
      break;
    case Token::SAR:
      __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
      break;
    case Token::SHL:
      __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
      break;
    case Token::SHR:
      __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}
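
// The SHR special case in the code above corresponds to JS expressions
// like
//
//   -1 >>> 0   // 4294967295
//
// where the unsigned result does not fit in a signed 32-bit smi payload,
// so a HeapNumber has to be allocated even though both inputs were smis.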


void GenericBinaryOpStub::GenerateLoadArguments(MacroAssembler* masm) {
  ASSERT(!HasArgsInRegisters());
  __ movq(rax, Operand(rsp, 1 * kPointerSize));
  __ movq(rdx, Operand(rsp, 2 * kPointerSize));
}


void GenericBinaryOpStub::GenerateReturn(MacroAssembler* masm) {
  // If arguments are not passed in registers remove them from the stack before
  // returning.
  if (!HasArgsInRegisters()) {
    __ ret(2 * kPointerSize);  // Remove both operands.
  } else {
    __ ret(0);
  }
}


void GenericBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
  ASSERT(HasArgsInRegisters());
  __ pop(rcx);
  if (HasArgsReversed()) {
    __ push(rax);
    __ push(rdx);
  } else {
    __ push(rdx);
    __ push(rax);
  }
  __ push(rcx);
}


void GenericBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  Label get_result;

  // Ensure the operands are on the stack.
  if (HasArgsInRegisters()) {
    GenerateRegisterArgsPush(masm);
  }

  // Left and right arguments are already on stack.
  __ pop(rcx);  // Save the return address.

  // Push this stub's key.
  __ Push(Smi::FromInt(MinorKey()));

  // Although the operation and the type info are encoded into the key,
  // the encoding is opaque, so push them too.
  __ Push(Smi::FromInt(op_));

  __ Push(Smi::FromInt(runtime_operands_type_));

  __ push(rcx);  // The return address.

  // Perform patching to an appropriate fast case and return the result.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kBinaryOp_Patch)),
      5,
      1);
}
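
// After the pushes above, the stack seen by kBinaryOp_Patch is:
//
//   [rsp + 0 * kPointerSize]  return address
//   [rsp + 1 * kPointerSize]  runtime_operands_type_ (as smi)
//   [rsp + 2 * kPointerSize]  op_ (as smi)
//   [rsp + 3 * kPointerSize]  MinorKey() (as smi)
//   [rsp + 4 * kPointerSize]  right operand
//   [rsp + 5 * kPointerSize]  left operand
//
// matching the argument count of 5 passed to TailCallExternalReference.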


Handle<Code> GetBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info) {
  GenericBinaryOpStub stub(key, type_info);
  return stub.GetCode();
}


Handle<Code> GetTypeRecordingBinaryOpStub(int key,
    TRBinaryOpIC::TypeInfo type_info,
    TRBinaryOpIC::TypeInfo result_type_info) {
  TypeRecordingBinaryOpStub stub(key, type_info, result_type_info);
  return stub.GetCode();
}


void TypeRecordingBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ pop(rcx);  // Save return address.
  __ push(rdx);
  __ push(rax);
  // Left and right arguments are now on top.
  // Push this stub's key. Although the operation and the type info are
  // encoded into the key, the encoding is opaque, so push them too.
  __ Push(Smi::FromInt(MinorKey()));
  __ Push(Smi::FromInt(op_));
  __ Push(Smi::FromInt(operands_type_));

  __ push(rcx);  // Push return address.

  // Patch the caller to an appropriate specialized stub and return the
  // operation result to the caller of the stub.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch)),
      5,
      1);
}


void TypeRecordingBinaryOpStub::Generate(MacroAssembler* masm) {
  switch (operands_type_) {
    case TRBinaryOpIC::UNINITIALIZED:
      GenerateTypeTransition(masm);
      break;
    case TRBinaryOpIC::SMI:
      GenerateSmiStub(masm);
      break;
    case TRBinaryOpIC::INT32:
      UNREACHABLE();
      // The int32 case is identical to the Smi case. We avoid creating this
      // ic state on x64.
      break;
    case TRBinaryOpIC::HEAP_NUMBER:
      GenerateHeapNumberStub(masm);
      break;
    case TRBinaryOpIC::STRING:
      GenerateStringStub(masm);
      break;
    case TRBinaryOpIC::GENERIC:
      GenerateGeneric(masm);
      break;
    default:
      UNREACHABLE();
  }
}
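
// A rough sketch of how operands_type_ evolves: each specialized stub
// bails out through GenerateTypeTransition when it sees operands it
// cannot handle, and the runtime patches the call site to a more general
// stub, roughly:
//
//   UNINITIALIZED -> SMI -> HEAP_NUMBER -> GENERIC
//                 \-> STRING (ADD only) -/
//
// The exact transitions are decided by kTypeRecordingBinaryOp_Patch in
// the runtime, so treat this diagram as illustrative.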


const char* TypeRecordingBinaryOpStub::GetName() {
  if (name_ != NULL) return name_;
  const int kMaxNameLength = 100;
  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
  if (name_ == NULL) return "OOM";
  const char* op_name = Token::Name(op_);
  const char* overwrite_name;
  switch (mode_) {
    case NO_OVERWRITE: overwrite_name = "Alloc"; break;
    case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
    case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
    default: overwrite_name = "UnknownOverwrite"; break;
  }

  OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
               "TypeRecordingBinaryOpStub_%s_%s_%s",
               op_name,
               overwrite_name,
               TRBinaryOpIC::GetName(operands_type_));
  return name_;
}


void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm,
    Label* slow,
    SmiCodeGenerateHeapNumberResults allow_heapnumber_results) {
  // We only generate heapnumber answers for overflowing calculations
  // for the four basic arithmetic operations.
  bool generate_inline_heapnumber_results =
      (allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS) &&
      (op_ == Token::ADD || op_ == Token::SUB ||
       op_ == Token::MUL || op_ == Token::DIV);

  // Arguments to TypeRecordingBinaryOpStub are in rdx and rax.
  Register left = rdx;
  Register right = rax;

  // Smi check of both operands. If op is BIT_OR, the check is delayed
  // until after the OR operation.
  Label not_smis;
  Label use_fp_on_smis;
  Label restore_MOD_registers;  // Only used if op_ == Token::MOD.

  if (op_ != Token::BIT_OR) {
    Comment smi_check_comment(masm, "-- Smi check arguments");
    __ JumpIfNotBothSmi(left, right, &not_smis);
  }

  // Perform the operation.
  Comment perform_smi(masm, "-- Perform smi operation");
  switch (op_) {
    case Token::ADD:
      ASSERT(right.is(rax));
      __ SmiAdd(right, right, left, &use_fp_on_smis);  // ADD is commutative.
      break;

    case Token::SUB:
      __ SmiSub(left, left, right, &use_fp_on_smis);
      __ movq(rax, left);
      break;

    case Token::MUL:
      ASSERT(right.is(rax));
      __ SmiMul(right, right, left, &use_fp_on_smis);  // MUL is commutative.
      break;

    case Token::DIV:
      // SmiDiv will not accept left in rdx or right in rax.
      left = rcx;
      right = rbx;
      __ movq(rbx, rax);
      __ movq(rcx, rdx);
      __ SmiDiv(rax, left, right, &use_fp_on_smis);
      break;

    case Token::MOD:
      // SmiMod will not accept left in rdx or right in rax.
      left = rcx;
      right = rbx;
      __ movq(rbx, rax);
      __ movq(rcx, rdx);
      __ SmiMod(rax, left, right, &use_fp_on_smis);
      break;

    case Token::BIT_OR: {
      ASSERT(right.is(rax));
      __ movq(rcx, right);  // Save the right operand.
      __ SmiOr(right, right, left);  // BIT_OR is commutative.
      __ JumpIfNotSmi(right, &not_smis);  // Test delayed until after BIT_OR.
      break;
    }
    case Token::BIT_XOR:
      ASSERT(right.is(rax));
      __ SmiXor(right, right, left);  // BIT_XOR is commutative.
      break;

    case Token::BIT_AND:
      ASSERT(right.is(rax));
      __ SmiAnd(right, right, left);  // BIT_AND is commutative.
      break;

    case Token::SHL:
      __ SmiShiftLeft(left, left, right);
      __ movq(rax, left);
      break;

    case Token::SAR:
      __ SmiShiftArithmeticRight(left, left, right);
      __ movq(rax, left);
      break;

    case Token::SHR:
      __ SmiShiftLogicalRight(left, left, right, &not_smis);
      __ movq(rax, left);
      break;

    default:
      UNREACHABLE();
  }

  // 5. Emit return of result in rax. Some operations have registers pushed.
  __ ret(0);

  // 6. For some operations emit inline code to perform floating point
  // operations on known smis (e.g., if the result of the operation
  // overflowed the smi range).
  __ bind(&use_fp_on_smis);
  if (op_ == Token::DIV || op_ == Token::MOD) {
    // Restore left and right to rdx and rax.
    __ movq(rdx, rcx);
    __ movq(rax, rbx);
  }

  if (generate_inline_heapnumber_results) {
    __ AllocateHeapNumber(rcx, rbx, slow);
    Comment perform_float(masm, "-- Perform float operation on smis");
    FloatingPointHelper::LoadSSE2SmiOperands(masm);
    switch (op_) {
      case Token::ADD: __ addsd(xmm0, xmm1); break;
      case Token::SUB: __ subsd(xmm0, xmm1); break;
      case Token::MUL: __ mulsd(xmm0, xmm1); break;
      case Token::DIV: __ divsd(xmm0, xmm1); break;
      default: UNREACHABLE();
    }
    __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0);
    __ movq(rax, rcx);
    __ ret(0);
  }

  // 7. Non-smi operands reach the end of the code generated by
  // GenerateSmiCode, and fall through to subsequent code,
  // with the operands in rdx and rax.
  Comment done_comment(masm, "-- Enter non-smi code");
  __ bind(&not_smis);
  if (op_ == Token::BIT_OR) {
    __ movq(right, rcx);
  }
}


void TypeRecordingBinaryOpStub::GenerateFloatingPointCode(
    MacroAssembler* masm,
    Label* allocation_failure,
    Label* non_numeric_failure) {
  switch (op_) {
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV: {
      FloatingPointHelper::LoadSSE2UnknownOperands(masm, non_numeric_failure);

      switch (op_) {
        case Token::ADD: __ addsd(xmm0, xmm1); break;
        case Token::SUB: __ subsd(xmm0, xmm1); break;
        case Token::MUL: __ mulsd(xmm0, xmm1); break;
        case Token::DIV: __ divsd(xmm0, xmm1); break;
        default: UNREACHABLE();
      }
      GenerateHeapResultAllocation(masm, allocation_failure);
      __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0);
      __ ret(0);
      break;
    }
    case Token::MOD: {
      // For MOD we jump to the allocation_failure label, to call runtime.
      __ jmp(allocation_failure);
      break;
    }
    case Token::BIT_OR:
    case Token::BIT_AND:
    case Token::BIT_XOR:
    case Token::SAR:
    case Token::SHL:
    case Token::SHR: {
      Label non_smi_shr_result;
      Register heap_number_map = r9;
      __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
      FloatingPointHelper::LoadAsIntegers(masm, non_numeric_failure,
                                          heap_number_map);
      switch (op_) {
        case Token::BIT_OR: __ orl(rax, rcx); break;
        case Token::BIT_AND: __ andl(rax, rcx); break;
        case Token::BIT_XOR: __ xorl(rax, rcx); break;
        case Token::SAR: __ sarl_cl(rax); break;
        case Token::SHL: __ shll_cl(rax); break;
        case Token::SHR: {
          __ shrl_cl(rax);
          // Check if result is negative. This can only happen for a shift
          // by zero.
          __ testl(rax, rax);
          __ j(negative, &non_smi_shr_result);
          break;
        }
        default: UNREACHABLE();
      }
      STATIC_ASSERT(kSmiValueSize == 32);
      // Tag smi result and return.
      __ Integer32ToSmi(rax, rax);
      __ Ret();

      // Logical shift right can produce an unsigned int32 that is not
      // an int32, and so is not in the smi range. Allocate a heap number
      // in that case.
      if (op_ == Token::SHR) {
        __ bind(&non_smi_shr_result);
        Label allocation_failed;
        __ movl(rbx, rax);  // rbx holds result value (uint32 value as int64).
        // Allocate heap number in new space.
        // Not using AllocateHeapNumber macro in order to reuse
        // already loaded heap_number_map.
        __ AllocateInNewSpace(HeapNumber::kSize,
                              rax,
                              rcx,
                              no_reg,
                              &allocation_failed,
                              TAG_OBJECT);
        // Set the map.
        if (FLAG_debug_code) {
          __ AbortIfNotRootValue(heap_number_map,
                                 Heap::kHeapNumberMapRootIndex,
                                 "HeapNumberMap register clobbered.");
        }
        __ movq(FieldOperand(rax, HeapObject::kMapOffset),
                heap_number_map);
        __ cvtqsi2sd(xmm0, rbx);
        __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0);
        __ Ret();

        __ bind(&allocation_failed);
        // We need tagged values in rdx and rax for the following code,
        // not int32 in rax and rcx.
        __ Integer32ToSmi(rax, rcx);
        __ Integer32ToSmi(rdx, rax);
        __ jmp(allocation_failure);
      }
      break;
    }
    default: UNREACHABLE(); break;
  }
  // No fall-through from this generated code.
  if (FLAG_debug_code) {
    __ Abort("Unexpected fall-through in "
             "TypeRecordingBinaryStub::GenerateFloatingPointCode.");
  }
}


void TypeRecordingBinaryOpStub::GenerateStringAddCode(MacroAssembler* masm) {
  ASSERT(op_ == Token::ADD);
  NearLabel left_not_string, call_runtime;

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;

  // Test if left operand is a string.
  __ JumpIfSmi(left, &left_not_string);
  __ CmpObjectType(left, FIRST_NONSTRING_TYPE, rcx);
  __ j(above_equal, &left_not_string);
  StringAddStub string_add_left_stub(NO_STRING_CHECK_LEFT_IN_STUB);
  GenerateRegisterArgsPush(masm);
  __ TailCallStub(&string_add_left_stub);

  // Left operand is not a string, test right.
  __ bind(&left_not_string);
  __ JumpIfSmi(right, &call_runtime);
  __ CmpObjectType(right, FIRST_NONSTRING_TYPE, rcx);
  __ j(above_equal, &call_runtime);

  StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB);
  GenerateRegisterArgsPush(masm);
  __ TailCallStub(&string_add_right_stub);

  // Neither argument is a string.
  __ bind(&call_runtime);
}
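
// In JS terms, this fast path handles ADD expressions where either
// operand is already a string:
//
//   "foo" + 1   // "foo1"  (left is a string)
//   1 + "foo"   // "1foo"  (right is a string)
//
// When neither operand is a string, control falls through at call_runtime
// and the caller decides how to continue.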


void TypeRecordingBinaryOpStub::GenerateCallRuntimeCode(MacroAssembler* masm) {
  GenerateRegisterArgsPush(masm);
  switch (op_) {
    case Token::ADD:
      __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
      break;
    case Token::SUB:
      __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
      break;
    case Token::MUL:
      __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
      break;
    case Token::DIV:
      __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
      break;
    case Token::MOD:
      __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
      break;
    case Token::BIT_OR:
      __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
      break;
    case Token::BIT_AND:
      __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
      break;
    case Token::BIT_XOR:
      __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
      break;
    case Token::SAR:
      __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
      break;
    case Token::SHL:
      __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
      break;
    case Token::SHR:
      __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}


void TypeRecordingBinaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
  Label not_smi;

  GenerateSmiCode(masm, &not_smi, NO_HEAPNUMBER_RESULTS);

  __ bind(&not_smi);
  GenerateTypeTransition(masm);
}


void TypeRecordingBinaryOpStub::GenerateStringStub(MacroAssembler* masm) {
  ASSERT(operands_type_ == TRBinaryOpIC::STRING);
  ASSERT(op_ == Token::ADD);
  // Try to add arguments as strings; otherwise, transition to the generic
  // TRBinaryOpIC type.
  GenerateStringAddCode(masm);
  GenerateTypeTransition(masm);
}


void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
  Label gc_required, not_number;
  GenerateFloatingPointCode(masm, &gc_required, &not_number);

  __ bind(&not_number);
  GenerateTypeTransition(masm);

  __ bind(&gc_required);
  GenerateCallRuntimeCode(masm);
}


void TypeRecordingBinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
  Label call_runtime, call_string_add_or_runtime;

  GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);

  GenerateFloatingPointCode(masm, &call_runtime, &call_string_add_or_runtime);

  __ bind(&call_string_add_or_runtime);
  if (op_ == Token::ADD) {
    GenerateStringAddCode(masm);
  }

  __ bind(&call_runtime);
  GenerateCallRuntimeCode(masm);
}


void TypeRecordingBinaryOpStub::GenerateHeapResultAllocation(
    MacroAssembler* masm,
    Label* alloc_failure) {
  Label skip_allocation;
  OverwriteMode mode = mode_;
  switch (mode) {
    case OVERWRITE_LEFT: {
      // If the argument in rdx is already an object, we skip the
      // allocation of a heap number.
      __ JumpIfNotSmi(rdx, &skip_allocation);
      // Allocate a heap number for the result. Keep rax and rdx intact
      // for the possible runtime call.
      __ AllocateHeapNumber(rbx, rcx, alloc_failure);
      // Now rdx can be overwritten losing one of the arguments as we are
      // now done and will not need it any more.
      __ movq(rdx, rbx);
      __ bind(&skip_allocation);
      // Use object in rdx as a result holder.
      __ movq(rax, rdx);
      break;
    }
    case OVERWRITE_RIGHT:
      // If the argument in rax is already an object, we skip the
      // allocation of a heap number.
      __ JumpIfNotSmi(rax, &skip_allocation);
      // Fall through!
    case NO_OVERWRITE:
      // Allocate a heap number for the result. Keep rax and rdx intact
      // for the possible runtime call.
      __ AllocateHeapNumber(rbx, rcx, alloc_failure);
      // Now rax can be overwritten losing one of the arguments as we are
      // now done and will not need it any more.
      __ movq(rax, rbx);
      __ bind(&skip_allocation);
      break;
    default: UNREACHABLE();
  }
}


void TypeRecordingBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
  __ pop(rcx);
  __ push(rdx);
  __ push(rax);
  __ push(rcx);
}


void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
  // TAGGED case:
  //   Input:
  //     rsp[8]: argument (should be number).
  //     rsp[0]: return address.
  //   Output:
  //     rax: tagged double result.
  // UNTAGGED case:
  //   Input:
  //     rsp[0]: return address.
  //     xmm1: untagged double input argument.
  //   Output:
  //     xmm1: untagged double result.

  Label runtime_call;
  Label runtime_call_clear_stack;
  Label skip_cache;
  const bool tagged = (argument_type_ == TAGGED);
  if (tagged) {
    NearLabel input_not_smi;
    NearLabel loaded;
    // Test that rax is a number.
    __ movq(rax, Operand(rsp, kPointerSize));
    __ JumpIfNotSmi(rax, &input_not_smi);
    // Input is a smi. Untag and load it onto the FPU stack.
    // Then load the bits of the double into rbx.
    __ SmiToInteger32(rax, rax);
    __ subq(rsp, Immediate(kDoubleSize));
    __ cvtlsi2sd(xmm1, rax);
    __ movsd(Operand(rsp, 0), xmm1);
    __ movq(rbx, xmm1);
    __ movq(rdx, xmm1);
    __ fld_d(Operand(rsp, 0));
    __ addq(rsp, Immediate(kDoubleSize));
    __ jmp(&loaded);

    __ bind(&input_not_smi);
    // Check if input is a HeapNumber.
    __ LoadRoot(rbx, Heap::kHeapNumberMapRootIndex);
    __ cmpq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
    __ j(not_equal, &runtime_call);
    // Input is a HeapNumber. Push it on the FPU stack and load its
    // bits into rbx.
    __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset));
    __ movq(rbx, FieldOperand(rax, HeapNumber::kValueOffset));
    __ movq(rdx, rbx);

    __ bind(&loaded);
  } else {  // UNTAGGED.
    __ movq(rbx, xmm1);
    __ movq(rdx, xmm1);
  }

  // ST[0] == double value, if TAGGED.
  // rbx = bits of double value.
  // rdx = also bits of double value.
  // Compute hash (h is 32 bits, bits are 64 and the shifts are arithmetic):
  //   h = h0 = bits ^ (bits >> 32);
  //   h ^= h >> 16;
  //   h ^= h >> 8;
  //   h = h & (cacheSize - 1);
  // or h = (h0 ^ (h0 >> 8) ^ (h0 >> 16) ^ (h0 >> 24)) & (cacheSize - 1)
  __ sar(rdx, Immediate(32));
  __ xorl(rdx, rbx);
  __ movl(rcx, rdx);
  __ movl(rax, rdx);
  __ movl(rdi, rdx);
  __ sarl(rdx, Immediate(8));
  __ sarl(rcx, Immediate(16));
  __ sarl(rax, Immediate(24));
  __ xorl(rcx, rdx);
  __ xorl(rax, rdi);
  __ xorl(rcx, rax);
  ASSERT(IsPowerOf2(TranscendentalCache::kCacheSize));
  __ andl(rcx, Immediate(TranscendentalCache::kCacheSize - 1));

  // ST[0] == double value.
  // rbx = bits of double value.
  // rcx = TranscendentalCache::hash(double value).
  __ movq(rax, ExternalReference::transcendental_cache_array_address());
  // rax points to cache array.
  __ movq(rax, Operand(rax, type_ * sizeof(TranscendentalCache::caches_[0])));
  // rax points to the cache for the type type_.
  // If NULL, the cache hasn't been initialized yet, so go through runtime.
  __ testq(rax, rax);
  __ j(zero, &runtime_call_clear_stack);  // Only clears stack if TAGGED.
#ifdef DEBUG
  // Check that the layout of cache elements matches expectations.
  {  // NOLINT - doesn't like a single brace on a line.
    TranscendentalCache::Element test_elem[2];
    char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
    char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
    char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
    char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
    char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
    // Two uint_32's and a pointer per element.
    CHECK_EQ(16, static_cast<int>(elem2_start - elem_start));
    CHECK_EQ(0, static_cast<int>(elem_in0 - elem_start));
    CHECK_EQ(kIntSize, static_cast<int>(elem_in1 - elem_start));
    CHECK_EQ(2 * kIntSize, static_cast<int>(elem_out - elem_start));
  }
#endif
  // Find the address of the rcx'th entry in the cache, i.e., &rax[rcx*16].
  __ addl(rcx, rcx);
  __ lea(rcx, Operand(rax, rcx, times_8, 0));
  // Check if cache matches: Double value is stored in uint32_t[2] array.
  NearLabel cache_miss;
  __ cmpq(rbx, Operand(rcx, 0));
  __ j(not_equal, &cache_miss);
  // Cache hit!
  __ movq(rax, Operand(rcx, 2 * kIntSize));
  if (tagged) {
    __ fstp(0);  // Clear FPU stack.
    __ ret(kPointerSize);
  } else {  // UNTAGGED.
    __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
    __ Ret();
  }

  __ bind(&cache_miss);
  // Update cache with new value.
  if (tagged) {
    __ AllocateHeapNumber(rax, rdi, &runtime_call_clear_stack);
  } else {  // UNTAGGED.
    __ AllocateHeapNumber(rax, rdi, &skip_cache);
    __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1);
    __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset));
  }
  GenerateOperation(masm);
  __ movq(Operand(rcx, 0), rbx);
  __ movq(Operand(rcx, 2 * kIntSize), rax);
  __ fstp_d(FieldOperand(rax, HeapNumber::kValueOffset));
  if (tagged) {
    __ ret(kPointerSize);
  } else {  // UNTAGGED.
    __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
    __ Ret();

    // Skip cache and return answer directly, only in untagged case.
    __ bind(&skip_cache);
    __ subq(rsp, Immediate(kDoubleSize));
    __ movsd(Operand(rsp, 0), xmm1);
    __ fld_d(Operand(rsp, 0));
    GenerateOperation(masm);
    __ fstp_d(Operand(rsp, 0));
    __ movsd(xmm1, Operand(rsp, 0));
    __ addq(rsp, Immediate(kDoubleSize));
    // We return the value in xmm1 without adding it to the cache, but
    // we cause a scavenging GC so that future allocations will succeed.
    __ EnterInternalFrame();
    // Allocate an unused object bigger than a HeapNumber.
    __ Push(Smi::FromInt(2 * kDoubleSize));
    __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
    __ LeaveInternalFrame();
    __ Ret();
  }

  // Call runtime, doing whatever allocation and cleanup is necessary.
  if (tagged) {
    __ bind(&runtime_call_clear_stack);
    __ fstp(0);
    __ bind(&runtime_call);
    __ TailCallExternalReference(ExternalReference(RuntimeFunction()), 1, 1);
  } else {  // UNTAGGED.
    __ bind(&runtime_call_clear_stack);
    __ bind(&runtime_call);
    __ AllocateHeapNumber(rax, rdi, &skip_cache);
    __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1);
    __ EnterInternalFrame();
    __ push(rax);
    __ CallRuntime(RuntimeFunction(), 1);
    __ LeaveInternalFrame();
    __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
    __ Ret();
  }
}
1684
1685
1686Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
1687 switch (type_) {
1688 // Add more cases when necessary.
1689 case TranscendentalCache::SIN: return Runtime::kMath_sin;
1690 case TranscendentalCache::COS: return Runtime::kMath_cos;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001691 case TranscendentalCache::LOG: return Runtime::kMath_log;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001692 default:
1693 UNIMPLEMENTED();
1694 return Runtime::kAbort;
1695 }
1696}


void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
  // Registers:
  // rax: Newly allocated HeapNumber, which must be preserved.
  // rbx: Bits of input double. Must be preserved.
  // rcx: Pointer to cache entry. Must be preserved.
  // st(0): Input double
  Label done;
  if (type_ == TranscendentalCache::SIN || type_ == TranscendentalCache::COS) {
    // Both fsin and fcos require arguments in the range +/-2^63 and
    // return NaN for infinities and NaN. They can share all code except
    // the actual fsin/fcos operation.
    Label in_range;
    // If the argument is outside the range -2^63..2^63, fsin/fcos don't
    // work. We must reduce it to the appropriate range.
    __ movq(rdi, rbx);
    // Move exponent and sign bits to low bits.
    __ shr(rdi, Immediate(HeapNumber::kMantissaBits));
    // Remove sign bit.
    __ andl(rdi, Immediate((1 << HeapNumber::kExponentBits) - 1));
    int supported_exponent_limit = (63 + HeapNumber::kExponentBias);
    __ cmpl(rdi, Immediate(supported_exponent_limit));
    __ j(below, &in_range);
    // Check for infinity and NaN. Both return NaN for sin.
    __ cmpl(rdi, Immediate(0x7ff));
    NearLabel non_nan_result;
    __ j(not_equal, &non_nan_result);
    // Input is +/-Infinity or NaN. Result is NaN.
    __ fstp(0);
    __ LoadRoot(kScratchRegister, Heap::kNanValueRootIndex);
    __ fld_d(FieldOperand(kScratchRegister, HeapNumber::kValueOffset));
    __ jmp(&done);

    __ bind(&non_nan_result);

    // Use fprem1 to restrict the argument to the range +/-2*PI.
    __ movq(rdi, rax); // Save rax before using fnstsw_ax.
    __ fldpi();
    __ fadd(0);
    __ fld(1);
    // FPU Stack: input, 2*pi, input.
    {
      Label no_exceptions;
      __ fwait();
      __ fnstsw_ax();
      // Clear if Illegal Operand or Zero Division exceptions are set.
      __ testl(rax, Immediate(5)); // #IO and #ZD flags of FPU status word.
      __ j(zero, &no_exceptions);
      __ fnclex();
      __ bind(&no_exceptions);
    }

    // Compute st(0) % st(1)
    {
      NearLabel partial_remainder_loop;
      __ bind(&partial_remainder_loop);
      __ fprem1();
      __ fwait();
      __ fnstsw_ax();
      __ testl(rax, Immediate(0x400)); // Check C2 bit of FPU status word.
      // If C2 is set, computation only has partial result. Loop to
      // continue computation.
      __ j(not_zero, &partial_remainder_loop);
    }
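
    // In C-like terms the loop above is, roughly:
    //   do {
    //     st0 = fprem1(st0, st1);   // partial remainder of input / 2*pi
    //   } while (fpu_status & C2);  // C2 set => result is only partial
    // fprem1 reduces the exponent difference by at most 63 bits per
    // iteration, so very large arguments may need several rounds.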
    // FPU Stack: input, 2*pi, input % 2*pi
    __ fstp(2);
    // FPU Stack: input % 2*pi, 2*pi
    __ fstp(0);
    // FPU Stack: input % 2*pi
    __ movq(rax, rdi); // Restore rax, pointer to the new HeapNumber.
    __ bind(&in_range);
    switch (type_) {
      case TranscendentalCache::SIN:
        __ fsin();
        break;
      case TranscendentalCache::COS:
        __ fcos();
        break;
      default:
        UNREACHABLE();
    }
    __ bind(&done);
  } else {
    ASSERT(type_ == TranscendentalCache::LOG);
    __ fldln2();
    __ fxch();
    __ fyl2x();
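    // fyl2x computes st(1) * log2(st(0)) and pops the stack, so with
    // st(1) == ln(2) (pushed by fldln2) this leaves ln(input) in st(0).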
  }
}


// Get the integer part of a heap number.
// Overwrites the contents of rdi, rbx and rcx. Result cannot be rdi or rbx.
void IntegerConvert(MacroAssembler* masm,
                    Register result,
                    Register source) {
  // Result may be rcx. If result and source are the same register, source will
  // be overwritten.
  ASSERT(!result.is(rdi) && !result.is(rbx));
  // TODO(lrn): When type info reaches here, if value is a 32-bit integer, use
  // cvttsd2si (32-bit version) directly.
  Register double_exponent = rbx;
  Register double_value = rdi;
  NearLabel done, exponent_63_plus;
  // Get double and extract exponent.
  __ movq(double_value, FieldOperand(source, HeapNumber::kValueOffset));
  // Clear result preemptively, in case we need to return zero.
  __ xorl(result, result);
  __ movq(xmm0, double_value); // Save copy in xmm0 in case we need it there.
  // Double to remove sign bit, shift exponent down to least significant bits,
  // and subtract bias to get the unshifted, unbiased exponent.
  __ lea(double_exponent, Operand(double_value, double_value, times_1, 0));
  __ shr(double_exponent, Immediate(64 - HeapNumber::kExponentBits));
  __ subl(double_exponent, Immediate(HeapNumber::kExponentBias));
  // Check whether the exponent is too big for a 63 bit unsigned integer.
  __ cmpl(double_exponent, Immediate(63));
  __ j(above_equal, &exponent_63_plus);
  // Handle exponent range 0..62.
  __ cvttsd2siq(result, xmm0);
  __ jmp(&done);

  __ bind(&exponent_63_plus);
  // Exponent negative or 63+.
  __ cmpl(double_exponent, Immediate(83));
  // If the exponent is negative or above 83, the number contains no
  // significant bits in the range 0..2^31, so the result is zero, and the
  // result register already holds zero.
  __ j(above, &done);

  // Exponent in range 63..83.
  // Mantissa * 2^exponent contains bits in the range 2^0..2^31, namely
  // the least significant exponent-52 bits.

  // Negate low bits of mantissa if value is negative.
  __ addq(double_value, double_value); // Move sign bit to carry.
  __ sbbl(result, result); // And convert carry to -1 in result register.
  // If the value is negative, compute (double_value - 1) ^ -1; otherwise
  // compute (double_value - 0) ^ 0, i.e. negate the low mantissa bits only
  // for negative inputs.
  __ addl(double_value, result);
  // Do xor in opposite directions depending on where we want the result
  // (depending on whether result is rcx or not).

  if (result.is(rcx)) {
    __ xorl(double_value, result);
    // Left shift mantissa by (exponent - mantissabits - 1) to save the
    // bits that have positional values below 2^32 (the extra -1 comes from the
    // doubling done above to move the sign bit into the carry flag).
    __ leal(rcx, Operand(double_exponent, -HeapNumber::kMantissaBits - 1));
    __ shll_cl(double_value);
    __ movl(result, double_value);
  } else {
    // As the then-branch, but move double-value to result before shifting.
    __ xorl(result, double_value);
    __ leal(rcx, Operand(double_exponent, -HeapNumber::kMantissaBits - 1));
    __ shll_cl(result);
  }

  __ bind(&done);
}
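
// Roughly, the function above implements double-to-integer truncation with
// ToInt32-style wrap-around for values outside the reach of cvttsd2siq.
// A C-style sketch (negate_if_sign stands for the addq/sbbl/xorl sequence
// above, not a real helper):
//   uint64_t bits = bit_cast<uint64_t>(value);
//   int exp = (int)((bits << 1) >> 53) - HeapNumber::kExponentBias;
//   if (exp < 0 || exp > 83) result = 0;              // no bits in 2^0..2^31
//   else if (exp <= 62)      result = (int64_t)value; // cvttsd2siq is exact
//   else                     result = negate_if_sign(bits) << (exp - 52);
// Only the low 32 bits of the result are meaningful to the callers.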


// Input: rdx, rax are the left and right objects of a bit op.
// Output: rax, rcx are left and right integers for a bit op.
void FloatingPointHelper::LoadNumbersAsIntegers(MacroAssembler* masm) {
  // Check float operands.
  Label done;
  Label rax_is_smi;
  Label rax_is_object;
  Label rdx_is_object;

  __ JumpIfNotSmi(rdx, &rdx_is_object);
  __ SmiToInteger32(rdx, rdx);
  __ JumpIfSmi(rax, &rax_is_smi);

  __ bind(&rax_is_object);
  IntegerConvert(masm, rcx, rax); // Uses rdi, rcx and rbx.
  __ jmp(&done);

  __ bind(&rdx_is_object);
  IntegerConvert(masm, rdx, rdx); // Uses rdi, rcx and rbx.
  __ JumpIfNotSmi(rax, &rax_is_object);
  __ bind(&rax_is_smi);
  __ SmiToInteger32(rcx, rax);

  __ bind(&done);
  __ movl(rax, rdx);
}


// Input: rdx, rax are the left and right objects of a bit op.
// Output: rax, rcx are left and right integers for a bit op.
// Jump to conversion_failure: rdx and rax are unchanged.
void FloatingPointHelper::LoadAsIntegers(MacroAssembler* masm,
                                         Label* conversion_failure,
                                         Register heap_number_map) {
  // Check float operands.
  Label arg1_is_object, check_undefined_arg1;
  Label arg2_is_object, check_undefined_arg2;
  Label load_arg2, done;

  __ JumpIfNotSmi(rdx, &arg1_is_object);
  __ SmiToInteger32(r8, rdx);
  __ jmp(&load_arg2);

  // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
  __ bind(&check_undefined_arg1);
  __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, conversion_failure);
  __ movl(r8, Immediate(0));
  __ jmp(&load_arg2);

  __ bind(&arg1_is_object);
  __ cmpq(FieldOperand(rdx, HeapObject::kMapOffset), heap_number_map);
  __ j(not_equal, &check_undefined_arg1);
  // Get the untagged integer version of the rdx heap number in r8.
  IntegerConvert(masm, r8, rdx);

  // Here r8 has the untagged integer, rax has a Smi or a heap number.
  __ bind(&load_arg2);
  // Test if arg2 is a Smi.
  __ JumpIfNotSmi(rax, &arg2_is_object);
  __ SmiToInteger32(rcx, rax);
  __ jmp(&done);

  // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
  __ bind(&check_undefined_arg2);
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, conversion_failure);
  __ movl(rcx, Immediate(0));
  __ jmp(&done);

  __ bind(&arg2_is_object);
  __ cmpq(FieldOperand(rax, HeapObject::kMapOffset), heap_number_map);
  __ j(not_equal, &check_undefined_arg2);
  // Get the untagged integer version of the rax heap number in rcx.
  IntegerConvert(masm, rcx, rax);
  __ bind(&done);
  __ movl(rax, r8);
}


void FloatingPointHelper::LoadSSE2SmiOperands(MacroAssembler* masm) {
  __ SmiToInteger32(kScratchRegister, rdx);
  __ cvtlsi2sd(xmm0, kScratchRegister);
  __ SmiToInteger32(kScratchRegister, rax);
  __ cvtlsi2sd(xmm1, kScratchRegister);
}


void FloatingPointHelper::LoadSSE2NumberOperands(MacroAssembler* masm) {
  Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, done;
  // Load operand in rdx into xmm0.
  __ JumpIfSmi(rdx, &load_smi_rdx);
  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  // Load operand in rax into xmm1.
  __ JumpIfSmi(rax, &load_smi_rax);
  __ bind(&load_nonsmi_rax);
  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi_rdx);
  __ SmiToInteger32(kScratchRegister, rdx);
  __ cvtlsi2sd(xmm0, kScratchRegister);
  __ JumpIfNotSmi(rax, &load_nonsmi_rax);

  __ bind(&load_smi_rax);
  __ SmiToInteger32(kScratchRegister, rax);
  __ cvtlsi2sd(xmm1, kScratchRegister);

  __ bind(&done);
}


void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm,
                                                  Label* not_numbers) {
  Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done;
  // Load operand in rdx into xmm0, or branch to not_numbers.
  __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex);
  __ JumpIfSmi(rdx, &load_smi_rdx);
  __ cmpq(FieldOperand(rdx, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers); // Argument in rdx is not a number.
  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  // Load operand in rax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(rax, &load_smi_rax);

  __ bind(&load_nonsmi_rax);
  __ cmpq(FieldOperand(rax, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);
  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi_rdx);
  __ SmiToInteger32(kScratchRegister, rdx);
  __ cvtlsi2sd(xmm0, kScratchRegister);
  __ JumpIfNotSmi(rax, &load_nonsmi_rax);

  __ bind(&load_smi_rax);
  __ SmiToInteger32(kScratchRegister, rax);
  __ cvtlsi2sd(xmm1, kScratchRegister);
  __ bind(&done);
}


void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
  Label slow, done;

  if (op_ == Token::SUB) {
    if (include_smi_code_) {
      // Check whether the value is a smi.
      Label try_float;
      __ JumpIfNotSmi(rax, &try_float);
      if (negative_zero_ == kIgnoreNegativeZero) {
        __ SmiCompare(rax, Smi::FromInt(0));
        __ j(equal, &done);
      }
      __ SmiNeg(rax, rax, &done);
      __ jmp(&slow); // Zero, if not handled above, and Smi::kMinValue.

      // Try floating point case.
      __ bind(&try_float);
    } else if (FLAG_debug_code) {
      __ AbortIfSmi(rax);
    }

    __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &slow);
    // Operand is a float, negate its value by flipping the sign bit.
    __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset));
    __ movq(kScratchRegister, Immediate(0x01));
    __ shl(kScratchRegister, Immediate(63));
    __ xor_(rdx, kScratchRegister); // Flip sign.
    // rdx is the value to store.
    if (overwrite_ == UNARY_OVERWRITE) {
      __ movq(FieldOperand(rax, HeapNumber::kValueOffset), rdx);
    } else {
      __ AllocateHeapNumber(rcx, rbx, &slow);
      // rcx: allocated 'empty' number
      __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx);
      __ movq(rax, rcx);
    }
  } else if (op_ == Token::BIT_NOT) {
    if (include_smi_code_) {
      Label try_float;
      __ JumpIfNotSmi(rax, &try_float);
      __ SmiNot(rax, rax);
      __ jmp(&done);
      // Try floating point case.
      __ bind(&try_float);
    } else if (FLAG_debug_code) {
      __ AbortIfSmi(rax);
    }

    // Check if the operand is a heap number.
    __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &slow);

    // Convert the heap number in rax to an untagged integer, also in rax.
    IntegerConvert(masm, rax, rax);

    // Do the bitwise operation and smi tag the result.
    __ notl(rax);
    __ Integer32ToSmi(rax, rax);
  }

  // Return from the stub.
  __ bind(&done);
  __ StubReturn(1);

  // Handle the slow case by jumping to the JavaScript builtin.
  __ bind(&slow);
  __ pop(rcx); // Pop return address.
  __ push(rax);
  __ push(rcx); // Push return address.
  switch (op_) {
    case Token::SUB:
      __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
      break;
    case Token::BIT_NOT:
      __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}


void MathPowStub::Generate(MacroAssembler* masm) {
  // Registers are used as follows:
  // rdx = base
  // rax = exponent
  // rcx = temporary, result

  Label allocate_return, call_runtime;

  // Load input parameters.
  __ movq(rdx, Operand(rsp, 2 * kPointerSize));
  __ movq(rax, Operand(rsp, 1 * kPointerSize));

  // Save 1 in xmm3 - we need this several times later on.
  __ movl(rcx, Immediate(1));
  __ cvtlsi2sd(xmm3, rcx);

  Label exponent_nonsmi;
  Label base_nonsmi;
  // If the exponent is a heap number go to that specific case.
  __ JumpIfNotSmi(rax, &exponent_nonsmi);
  __ JumpIfNotSmi(rdx, &base_nonsmi);

  // Optimized version when both exponent and base are smis.
  Label powi;
  __ SmiToInteger32(rdx, rdx);
  __ cvtlsi2sd(xmm0, rdx);
  __ jmp(&powi);
  // Exponent is a smi and base is a heapnumber.
  __ bind(&base_nonsmi);
  __ CompareRoot(FieldOperand(rdx, HeapObject::kMapOffset),
                 Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &call_runtime);

  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));

  // Optimized version of pow if the exponent is a smi.
  // xmm0 contains the base.
  __ bind(&powi);
  __ SmiToInteger32(rax, rax);

  // Save the exponent in rdx (the base register), as we need to check its
  // sign later. We know that base and exponent are in different registers.
  __ movq(rdx, rax);

  // Get the absolute value of the exponent.
  NearLabel no_neg;
  __ cmpl(rax, Immediate(0));
  __ j(greater_equal, &no_neg);
  __ negl(rax);
  __ bind(&no_neg);

  // Load xmm1 with 1.
  __ movsd(xmm1, xmm3);
  NearLabel while_true;
  NearLabel no_multiply;

  __ bind(&while_true);
  __ shrl(rax, Immediate(1));
  __ j(not_carry, &no_multiply);
  __ mulsd(xmm1, xmm0);
  __ bind(&no_multiply);
  __ mulsd(xmm0, xmm0);
  __ j(not_zero, &while_true);
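
  // The loop above is binary exponentiation (square-and-multiply). Roughly:
  //   double powi(double base, uint32_t exp) { // exp = |smi exponent|
  //     double result = 1.0;
  //     do {
  //       bool low_bit = (exp & 1) != 0;  // the carry out of shrl
  //       exp >>= 1;
  //       if (low_bit) result *= base;
  //       base *= base;
  //     } while (exp != 0);
  //     return result;
  //   }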

  // rdx holds the original exponent value; if the exponent is negative,
  // return 1/result.
  __ testl(rdx, rdx);
  __ j(positive, &allocate_return);
  // Special case if xmm1 has reached infinity; 1/result is then +0, which
  // the comparison below detects and sends to the runtime.
  __ divsd(xmm3, xmm1);
  __ movsd(xmm1, xmm3);
  __ xorpd(xmm0, xmm0);
  __ ucomisd(xmm0, xmm1);
  __ j(equal, &call_runtime);

  __ jmp(&allocate_return);

  // The exponent (and possibly the base) is a heap number - from here on we
  // work on doubles.
  __ bind(&exponent_nonsmi);
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &call_runtime);
  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  // Test if the exponent is NaN.
  __ ucomisd(xmm1, xmm1);
  __ j(parity_even, &call_runtime);

  NearLabel base_not_smi;
  NearLabel handle_special_cases;
  __ JumpIfNotSmi(rdx, &base_not_smi);
  __ SmiToInteger32(rdx, rdx);
  __ cvtlsi2sd(xmm0, rdx);
  __ jmp(&handle_special_cases);

  __ bind(&base_not_smi);
  __ CompareRoot(FieldOperand(rdx, HeapObject::kMapOffset),
                 Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &call_runtime);
  __ movl(rcx, FieldOperand(rdx, HeapNumber::kExponentOffset));
  __ andl(rcx, Immediate(HeapNumber::kExponentMask));
  __ cmpl(rcx, Immediate(HeapNumber::kExponentMask));
  // The base is NaN or +/-Infinity if all exponent bits are set.
  __ j(greater_equal, &call_runtime);
  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));

  // The base is in xmm0 and the exponent is in xmm1.
  __ bind(&handle_special_cases);
  NearLabel not_minus_half;
  // Test for -0.5.
  // Load xmm2 with -0.5.
  __ movq(rcx, V8_UINT64_C(0xBFE0000000000000), RelocInfo::NONE);
  __ movq(xmm2, rcx);
  // xmm2 now has -0.5.
  __ ucomisd(xmm2, xmm1);
  __ j(not_equal, &not_minus_half);

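  // pow(x, -0.5) == 1/sqrt(x) and pow(x, 0.5) == sqrt(x) are handled inline
  // below. In both branches the base is first passed through xorpd/addsd
  // (computing 0.0 + x), which turns an input of -0 into +0, so sqrtsd then
  // yields the +0 that ECMA-262 requires instead of -0.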
  // Calculate the reciprocal of the square root.
  // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
  __ xorpd(xmm1, xmm1);
  __ addsd(xmm1, xmm0);
  __ sqrtsd(xmm1, xmm1);
  __ divsd(xmm3, xmm1);
  __ movsd(xmm1, xmm3);
  __ jmp(&allocate_return);

  // Test for 0.5.
  __ bind(&not_minus_half);
  // Load xmm2 with 0.5.
  // Since xmm3 is 1 and xmm2 is -0.5 this is simply xmm2 + xmm3.
  __ addsd(xmm2, xmm3);
  // xmm2 now has 0.5.
  __ ucomisd(xmm2, xmm1);
  __ j(not_equal, &call_runtime);
  // Calculate the square root.
  // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
  __ xorpd(xmm1, xmm1);
  __ addsd(xmm1, xmm0);
  __ sqrtsd(xmm1, xmm1);

  __ bind(&allocate_return);
  __ AllocateHeapNumber(rcx, rax, &call_runtime);
  __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm1);
  __ movq(rax, rcx);
  __ ret(2 * kPointerSize);

  __ bind(&call_runtime);
  __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
}


void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  // The key is in rdx and the parameter count is in rax.

  // The displacement is used for skipping the frame pointer on the
  // stack. It is the offset of the last parameter (if any) relative
  // to the frame pointer.
  static const int kDisplacement = 1 * kPointerSize;

  // Check that the key is a smi.
  Label slow;
  __ JumpIfNotSmi(rdx, &slow);

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor;
  __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ SmiCompare(Operand(rbx, StandardFrameConstants::kContextOffset),
                Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor);

  // Check index against formal parameters count limit passed in
  // through register rax. Use unsigned comparison to get negative
  // check for free.
  __ cmpq(rdx, rax);
  __ j(above_equal, &slow);

  // Read the argument from the stack and return it.
  SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
  __ lea(rbx, Operand(rbp, index.reg, index.scale, 0));
  index = masm->SmiToNegativeIndex(rdx, rdx, kPointerSizeLog2);
  __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement));
  __ Ret();

  // Arguments adaptor case: Check index against actual arguments
  // limit found in the arguments adaptor frame. Use unsigned
  // comparison to get negative check for free.
  __ bind(&adaptor);
  __ movq(rcx, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ cmpq(rdx, rcx);
  __ j(above_equal, &slow);

  // Read the argument from the stack and return it.
  index = masm->SmiToIndex(rax, rcx, kPointerSizeLog2);
  __ lea(rbx, Operand(rbx, index.reg, index.scale, 0));
  index = masm->SmiToNegativeIndex(rdx, rdx, kPointerSizeLog2);
  __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement));
  __ Ret();

  // Slow-case: Handle non-smi or out-of-bounds access to arguments
  // by calling the runtime system.
  __ bind(&slow);
  __ pop(rbx); // Return address.
  __ push(rdx);
  __ push(rbx);
  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
}


void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
  // rsp[0] : return address
  // rsp[8] : number of parameters
  // rsp[16] : receiver displacement
  // rsp[24] : function

  // The displacement is used for skipping the return address and the
  // frame pointer on the stack. It is the offset of the last
  // parameter (if any) relative to the frame pointer.
  static const int kDisplacement = 2 * kPointerSize;

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset),
                Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor_frame);

  // Get the length from the frame.
  __ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize));
  __ jmp(&try_allocate);

  // Patch the arguments.length and the parameters pointer.
  __ bind(&adaptor_frame);
  __ SmiToInteger32(rcx,
                    Operand(rdx,
                            ArgumentsAdaptorFrameConstants::kLengthOffset));
  // Space on stack must already hold a smi.
  __ Integer32ToSmiField(Operand(rsp, 1 * kPointerSize), rcx);
  // Do not clobber the length index for the indexing operation since
  // it is used to compute the size for allocation later.
  __ lea(rdx, Operand(rdx, rcx, times_pointer_size, kDisplacement));
  __ movq(Operand(rsp, 2 * kPointerSize), rdx);

  // Try the new space allocation. Start out with computing the size of
  // the arguments object and the elements array.
  Label add_arguments_object;
  __ bind(&try_allocate);
  __ testl(rcx, rcx);
  __ j(zero, &add_arguments_object);
  __ leal(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize));
  __ bind(&add_arguments_object);
  __ addl(rcx, Immediate(Heap::kArgumentsObjectSize));

  // Do the allocation of both objects in one go.
  __ AllocateInNewSpace(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT);

  // Get the arguments boilerplate from the current (global) context.
  int offset = Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX);
  __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset));
  __ movq(rdi, Operand(rdi, offset));

  // Copy the JS object part.
  STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
  __ movq(kScratchRegister, FieldOperand(rdi, 0 * kPointerSize));
  __ movq(rdx, FieldOperand(rdi, 1 * kPointerSize));
  __ movq(rbx, FieldOperand(rdi, 2 * kPointerSize));
  __ movq(FieldOperand(rax, 0 * kPointerSize), kScratchRegister);
  __ movq(FieldOperand(rax, 1 * kPointerSize), rdx);
  __ movq(FieldOperand(rax, 2 * kPointerSize), rbx);

  // Setup the callee in-object property.
  ASSERT(Heap::arguments_callee_index == 0);
  __ movq(kScratchRegister, Operand(rsp, 3 * kPointerSize));
  __ movq(FieldOperand(rax, JSObject::kHeaderSize), kScratchRegister);

  // Get the length (smi tagged) and set that as an in-object property too.
  ASSERT(Heap::arguments_length_index == 1);
  __ movq(rcx, Operand(rsp, 1 * kPointerSize));
  __ movq(FieldOperand(rax, JSObject::kHeaderSize + kPointerSize), rcx);

  // If there are no actual arguments, we're done.
  Label done;
  __ SmiTest(rcx);
  __ j(zero, &done);

  // Get the parameters pointer from the stack and untag the length.
  __ movq(rdx, Operand(rsp, 2 * kPointerSize));

  // Setup the elements pointer in the allocated arguments object and
  // initialize the header in the elements fixed array.
  __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSize));
  __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi);
  __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
  __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
  __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
  __ SmiToInteger32(rcx, rcx); // Untag length for the loop below.

  // Copy the fixed array slots.
  Label loop;
  __ bind(&loop);
  __ movq(kScratchRegister, Operand(rdx, -1 * kPointerSize)); // Skip receiver.
  __ movq(FieldOperand(rdi, FixedArray::kHeaderSize), kScratchRegister);
  __ addq(rdi, Immediate(kPointerSize));
  __ subq(rdx, Immediate(kPointerSize));
  __ decl(rcx);
  __ j(not_zero, &loop);

  // Return and remove the on-stack parameters.
  __ bind(&done);
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
}


void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Jump straight to the runtime system if native RegExp support was not
  // selected at compile time, or if the flag enabling the generated-code
  // RegExp entry is turned off at runtime.
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
#else  // V8_INTERPRETED_REGEXP
  if (!FLAG_regexp_entry_native) {
    __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
    return;
  }

  // Stack frame on entry.
  // rsp[0]: return address
  // rsp[8]: last_match_info (expected JSArray)
  // rsp[16]: previous index
  // rsp[24]: subject string
  // rsp[32]: JSRegExp object

  static const int kLastMatchInfoOffset = 1 * kPointerSize;
  static const int kPreviousIndexOffset = 2 * kPointerSize;
  static const int kSubjectOffset = 3 * kPointerSize;
  static const int kJSRegExpOffset = 4 * kPointerSize;

  Label runtime;

  // Ensure that a RegExp stack is allocated.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address();
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size();
  __ movq(kScratchRegister, address_of_regexp_stack_memory_size);
  __ movq(kScratchRegister, Operand(kScratchRegister, 0));
  __ testq(kScratchRegister, kScratchRegister);
  __ j(zero, &runtime);

  // Check that the first argument is a JSRegExp object.
  __ movq(rax, Operand(rsp, kJSRegExpOffset));
  __ JumpIfSmi(rax, &runtime);
  __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister);
  __ j(not_equal, &runtime);
  // Check that the RegExp has been compiled (data contains a fixed array).
  __ movq(rcx, FieldOperand(rax, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    Condition is_smi = masm->CheckSmi(rcx);
    __ Check(NegateCondition(is_smi),
             "Unexpected type for RegExp data, FixedArray expected");
    __ CmpObjectType(rcx, FIXED_ARRAY_TYPE, kScratchRegister);
    __ Check(equal, "Unexpected type for RegExp data, FixedArray expected");
  }

  // rcx: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
  __ SmiToInteger32(rbx, FieldOperand(rcx, JSRegExp::kDataTagOffset));
  __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP));
  __ j(not_equal, &runtime);

  // rcx: RegExp data (FixedArray)
  // Check that the number of captures fits in the static offsets vector
  // buffer.
  __ SmiToInteger32(rdx,
                    FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2.
  __ leal(rdx, Operand(rdx, rdx, times_1, 2));
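  // For example, a RegExp like /(a)(b)/ has two captures, giving
  // (2 + 1) * 2 == 6 registers: start/end of the whole match plus start/end
  // of each capture.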
  // Check that the static offsets vector buffer is large enough.
  __ cmpl(rdx, Immediate(OffsetsVector::kStaticOffsetsVectorSize));
  __ j(above, &runtime);

  // rcx: RegExp data (FixedArray)
  // rdx: Number of capture registers
  // Check that the second argument is a string.
  __ movq(rdi, Operand(rsp, kSubjectOffset));
  __ JumpIfSmi(rdi, &runtime);
  Condition is_string = masm->IsObjectStringType(rdi, rbx, rbx);
  __ j(NegateCondition(is_string), &runtime);

  // rdi: Subject string.
  // rax: RegExp data (FixedArray).
  // rdx: Number of capture registers.
  // Check that the third argument is a positive smi less than the string
  // length. A negative value will be greater (unsigned comparison).
  __ movq(rbx, Operand(rsp, kPreviousIndexOffset));
  __ JumpIfNotSmi(rbx, &runtime);
  __ SmiCompare(rbx, FieldOperand(rdi, String::kLengthOffset));
  __ j(above_equal, &runtime);

  // rax: RegExp data (FixedArray)
  // rdx: Number of capture registers
  // Check that the fourth object is a JSArray object.
  __ movq(rdi, Operand(rsp, kLastMatchInfoOffset));
  __ JumpIfSmi(rdi, &runtime);
  __ CmpObjectType(rdi, JS_ARRAY_TYPE, kScratchRegister);
  __ j(not_equal, &runtime);
  // Check that the JSArray is in fast case.
  __ movq(rbx, FieldOperand(rdi, JSArray::kElementsOffset));
  __ movq(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
  __ Cmp(rdi, Factory::fixed_array_map());
  __ j(not_equal, &runtime);
  // Check that the last match info has space for the capture registers and the
  // additional information. Ensure no overflow in add.
  STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset);
  __ SmiToInteger32(rdi, FieldOperand(rbx, FixedArray::kLengthOffset));
  __ addl(rdx, Immediate(RegExpImpl::kLastMatchOverhead));
  __ cmpl(rdx, rdi);
  __ j(greater, &runtime);

  // rax: RegExp data (FixedArray)
  // Check the representation and encoding of the subject string.
  NearLabel seq_ascii_string, seq_two_byte_string, check_code;
  __ movq(rdi, Operand(rsp, kSubjectOffset));
  __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
  // First check for flat two byte string.
  __ andb(rbx, Immediate(
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask));
  STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);
  // Any other flat string must be a flat ascii string.
  __ testb(rbx, Immediate(kIsNotStringMask | kStringRepresentationMask));
  __ j(zero, &seq_ascii_string);

  // Check for flat cons string.
  // A flat cons string is a cons string where the second part is the empty
  // string. In that case the subject string is just the first part of the cons
  // string. Also in this case the first part of the cons string is known to be
  // a sequential string or an external string.
  STATIC_ASSERT(kExternalStringTag != 0);
  STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0);
  __ testb(rbx, Immediate(kIsNotStringMask | kExternalStringTag));
  __ j(not_zero, &runtime);
  // String is a cons string.
  __ movq(rdx, FieldOperand(rdi, ConsString::kSecondOffset));
  __ Cmp(rdx, Factory::empty_string());
  __ j(not_equal, &runtime);
  __ movq(rdi, FieldOperand(rdi, ConsString::kFirstOffset));
  __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  // String is a cons string with empty second part.
  // rdi: first part of cons string.
  // rbx: map of first part of cons string.
  // Is first part a flat two byte string?
  __ testb(FieldOperand(rbx, Map::kInstanceTypeOffset),
           Immediate(kStringRepresentationMask | kStringEncodingMask));
  STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);
  // Any other flat string must be ascii.
  __ testb(FieldOperand(rbx, Map::kInstanceTypeOffset),
           Immediate(kStringRepresentationMask));
  __ j(not_zero, &runtime);

  __ bind(&seq_ascii_string);
  // rdi: subject string (sequential ascii)
  // rax: RegExp data (FixedArray)
  __ movq(r11, FieldOperand(rax, JSRegExp::kDataAsciiCodeOffset));
  __ Set(rcx, 1); // Type is ascii.
  __ jmp(&check_code);

  __ bind(&seq_two_byte_string);
  // rdi: subject string (flat two-byte)
  // rax: RegExp data (FixedArray)
  __ movq(r11, FieldOperand(rax, JSRegExp::kDataUC16CodeOffset));
  __ Set(rcx, 0); // Type is two byte.

  __ bind(&check_code);
  // Check that the irregexp code has been generated for the actual string
  // encoding. If it has, the field contains a code object; otherwise it
  // contains the hole.
  __ CmpObjectType(r11, CODE_TYPE, kScratchRegister);
  __ j(not_equal, &runtime);

  // rdi: subject string
  // rcx: encoding of subject string (1 if ascii, 0 if two_byte);
  // r11: code
  // Load used arguments before starting to push arguments for call to native
  // RegExp code to avoid handling changing stack height.
  __ SmiToInteger64(rbx, Operand(rsp, kPreviousIndexOffset));

  // rdi: subject string
  // rbx: previous index
  // rcx: encoding of subject string (1 if ascii 0 if two_byte);
  // r11: code
  // All checks done. Now push arguments for native regexp code.
  __ IncrementCounter(&Counters::regexp_entry_native, 1);

  static const int kRegExpExecuteArguments = 7;
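  // Summary of the native call being set up below (names are descriptive,
  // not an actual declaration): the code entry is invoked with
  //   (subject, previous_index, input_start, input_end, offsets_vector,
  //    backtrack_stack_top, direct_call)
  // as arguments 1 through 7.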
  int argument_slots_on_stack =
      masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
  __ EnterApiExitFrame(argument_slots_on_stack); // Clobbers rax!

  // Argument 7: Indicate that this is a direct call from JavaScript.
  __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize),
          Immediate(1));

  // Argument 6: Start (high end) of backtracking stack memory area.
  __ movq(kScratchRegister, address_of_regexp_stack_memory_address);
  __ movq(r9, Operand(kScratchRegister, 0));
  __ movq(kScratchRegister, address_of_regexp_stack_memory_size);
  __ addq(r9, Operand(kScratchRegister, 0));
  // Argument 6 passed in r9 on Linux and on the stack on Windows.
#ifdef _WIN64
  __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kPointerSize), r9);
#endif

  // Argument 5: static offsets vector buffer.
  __ movq(r8, ExternalReference::address_of_static_offsets_vector());
  // Argument 5 passed in r8 on Linux and on the stack on Windows.
#ifdef _WIN64
  __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kPointerSize), r8);
#endif

  // First four arguments are passed in registers on both Linux and Windows.
#ifdef _WIN64
  Register arg4 = r9;
  Register arg3 = r8;
  Register arg2 = rdx;
  Register arg1 = rcx;
#else
  Register arg4 = rcx;
  Register arg3 = rdx;
  Register arg2 = rsi;
  Register arg1 = rdi;
#endif

  // Keep track of aliasing between argX defined above and the registers used.
  // rdi: subject string
  // rbx: previous index
  // rcx: encoding of subject string (1 if ascii 0 if two_byte);
  // r11: code

  // Argument 4: End of string data
  // Argument 3: Start of string data
  NearLabel setup_two_byte, setup_rest;
  __ testb(rcx, rcx); // Last use of rcx as encoding of subject string.
  __ j(zero, &setup_two_byte);
  __ SmiToInteger32(rcx, FieldOperand(rdi, String::kLengthOffset));
  __ lea(arg4, FieldOperand(rdi, rcx, times_1, SeqAsciiString::kHeaderSize));
  __ lea(arg3, FieldOperand(rdi, rbx, times_1, SeqAsciiString::kHeaderSize));
  __ jmp(&setup_rest);
  __ bind(&setup_two_byte);
  __ SmiToInteger32(rcx, FieldOperand(rdi, String::kLengthOffset));
  __ lea(arg4, FieldOperand(rdi, rcx, times_2, SeqTwoByteString::kHeaderSize));
  __ lea(arg3, FieldOperand(rdi, rbx, times_2, SeqTwoByteString::kHeaderSize));

  __ bind(&setup_rest);
  // Argument 2: Previous index.
  __ movq(arg2, rbx);

  // Argument 1: Subject string.
#ifdef _WIN64
  __ movq(arg1, rdi);
#else
  // Already there in AMD64 calling convention.
  ASSERT(arg1.is(rdi));
#endif

  // Locate the code entry and call it.
  __ addq(r11, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(r11);

  __ LeaveApiExitFrame();

  // Check the result.
  NearLabel success;
  Label exception;
  __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::SUCCESS));
  __ j(equal, &success);
  __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION));
  __ j(equal, &exception);
  __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE));
  // If none of the above, it can only be retry.
  // Handle that in the runtime system.
  __ j(not_equal, &runtime);

  // For failure return null.
  __ LoadRoot(rax, Heap::kNullValueRootIndex);
  __ ret(4 * kPointerSize);

  // Load RegExp data.
  __ bind(&success);
  __ movq(rax, Operand(rsp, kJSRegExpOffset));
  __ movq(rcx, FieldOperand(rax, JSRegExp::kDataOffset));
  __ SmiToInteger32(rax,
                    FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2.
  __ leal(rdx, Operand(rax, rax, times_1, 2));

  // rdx: Number of capture registers
  // Load last_match_info which is still known to be a fast case JSArray.
  __ movq(rax, Operand(rsp, kLastMatchInfoOffset));
  __ movq(rbx, FieldOperand(rax, JSArray::kElementsOffset));

  // rbx: last_match_info backing store (FixedArray)
  // rdx: number of capture registers
  // Store the capture count.
  __ Integer32ToSmi(kScratchRegister, rdx);
  __ movq(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset),
          kScratchRegister);
  // Store last subject and last input.
  __ movq(rax, Operand(rsp, kSubjectOffset));
  __ movq(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax);
  __ movq(rcx, rbx);
  __ RecordWrite(rcx, RegExpImpl::kLastSubjectOffset, rax, rdi);
  __ movq(rax, Operand(rsp, kSubjectOffset));
  __ movq(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax);
  __ movq(rcx, rbx);
  __ RecordWrite(rcx, RegExpImpl::kLastInputOffset, rax, rdi);

  // Get the static offsets vector filled by the native regexp code.
  __ movq(rcx, ExternalReference::address_of_static_offsets_vector());

  // rbx: last_match_info backing store (FixedArray)
  // rcx: offsets vector
  // rdx: number of capture registers
  NearLabel next_capture, done;
  // Capture register counter starts from number of capture registers and
  // counts down until wrapping after zero.
  __ bind(&next_capture);
  __ subq(rdx, Immediate(1));
  __ j(negative, &done);
  // Read the value from the static offsets vector buffer and make it a smi.
  __ movl(rdi, Operand(rcx, rdx, times_int_size, 0));
  __ Integer32ToSmi(rdi, rdi);
  // Store the smi value in the last match info.
  __ movq(FieldOperand(rbx,
                       rdx,
                       times_pointer_size,
                       RegExpImpl::kFirstCaptureOffset),
          rdi);
  __ jmp(&next_capture);
  __ bind(&done);

  // Return last match info.
  __ movq(rax, Operand(rsp, kLastMatchInfoOffset));
  __ ret(4 * kPointerSize);

  __ bind(&exception);
  // Result must now be exception. If there is no pending exception already, a
  // stack overflow (on the backtrack stack) was detected in RegExp code, but
  // the exception has not been created yet. Handle that in the runtime system.
  // TODO(592): Rerunning the RegExp to get the stack overflow exception.
  ExternalReference pending_exception_address(Top::k_pending_exception_address);
  __ movq(rbx, pending_exception_address);
  __ movq(rax, Operand(rbx, 0));
  __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
  __ cmpq(rax, rdx);
  __ j(equal, &runtime);
  __ movq(Operand(rbx, 0), rdx);

  __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
  NearLabel termination_exception;
  __ j(equal, &termination_exception);
  __ Throw(rax);

  __ bind(&termination_exception);
  __ ThrowUncatchable(TERMINATION, rax);

  // Do the runtime call to execute the regexp.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
#endif  // V8_INTERPRETED_REGEXP
}


void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
  const int kMaxInlineLength = 100;
  Label slowcase;
  Label done;
  __ movq(r8, Operand(rsp, kPointerSize * 3));
  __ JumpIfNotSmi(r8, &slowcase);
  __ SmiToInteger32(rbx, r8);
  __ cmpl(rbx, Immediate(kMaxInlineLength));
  __ j(above, &slowcase);
  // Smi-tagging is equivalent to multiplying by 2.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  // Allocate RegExpResult followed by FixedArray with size in rbx.
  // JSArray: [Map][empty properties][Elements][Length-smi][index][input]
  // Elements: [Map][Length][..elements..]
  __ AllocateInNewSpace(JSRegExpResult::kSize + FixedArray::kHeaderSize,
                        times_pointer_size,
                        rbx, // In: Number of elements.
                        rax, // Out: Start of allocation (tagged).
                        rcx, // Out: End of allocation.
                        rdx, // Scratch register
                        &slowcase,
                        TAG_OBJECT);
  // rax: Start of allocated area, object-tagged.
  // rbx: Number of array elements as int32.
  // r8: Number of array elements as smi.

  // Set JSArray map to global.regexp_result_map().
  __ movq(rdx, ContextOperand(rsi, Context::GLOBAL_INDEX));
  __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalContextOffset));
  __ movq(rdx, ContextOperand(rdx, Context::REGEXP_RESULT_MAP_INDEX));
  __ movq(FieldOperand(rax, HeapObject::kMapOffset), rdx);

  // Set empty properties FixedArray.
  __ Move(FieldOperand(rax, JSObject::kPropertiesOffset),
          Factory::empty_fixed_array());

  // Set elements to point to FixedArray allocated right after the JSArray.
  __ lea(rcx, Operand(rax, JSRegExpResult::kSize));
  __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx);

  // Set input, index and length fields from arguments.
  __ movq(r8, Operand(rsp, kPointerSize * 1));
  __ movq(FieldOperand(rax, JSRegExpResult::kInputOffset), r8);
  __ movq(r8, Operand(rsp, kPointerSize * 2));
  __ movq(FieldOperand(rax, JSRegExpResult::kIndexOffset), r8);
  __ movq(r8, Operand(rsp, kPointerSize * 3));
  __ movq(FieldOperand(rax, JSArray::kLengthOffset), r8);

  // Fill out the elements FixedArray.
  // rax: JSArray.
  // rcx: FixedArray.
  // rbx: Number of elements in array as int32.

  // Set map.
  __ Move(FieldOperand(rcx, HeapObject::kMapOffset),
          Factory::fixed_array_map());
  // Set length.
  __ Integer32ToSmi(rdx, rbx);
  __ movq(FieldOperand(rcx, FixedArray::kLengthOffset), rdx);
  // Fill contents of fixed-array with the-hole.
  __ Move(rdx, Factory::the_hole_value());
  __ lea(rcx, FieldOperand(rcx, FixedArray::kHeaderSize));
  // Fill fixed array elements with hole.
  // rax: JSArray.
  // rbx: Number of elements in array that remain to be filled, as int32.
  // rcx: Start of elements in FixedArray.
  // rdx: the hole.
  Label loop;
  __ testl(rbx, rbx);
  __ bind(&loop);
  __ j(less_equal, &done); // Jump if rbx is negative or zero.
  __ subl(rbx, Immediate(1));
  __ movq(Operand(rcx, rbx, times_pointer_size, 0), rdx);
  __ jmp(&loop);

  __ bind(&done);
  __ ret(3 * kPointerSize);

  __ bind(&slowcase);
  __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
}


void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
                                                         Register object,
                                                         Register result,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         bool object_is_smi,
                                                         Label* not_found) {
  // Use of registers. Register result is used as a temporary.
  Register number_string_cache = result;
  Register mask = scratch1;
  Register scratch = scratch2;

  // Load the number string cache.
  __ LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);

  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  __ SmiToInteger32(
      mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
  __ shrl(mask, Immediate(1));
  __ subq(mask, Immediate(1)); // Make mask.
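
  // The cache stores number/string pairs in adjacent slots, so a backing
  // FixedArray of length 128, for example, holds 64 entries and the mask
  // computed above is 64 - 1 == 63.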

  // Calculate the entry in the number string cache. The hash value in the
  // number string cache for smis is just the smi value, and the hash for
  // doubles is the xor of the upper and lower words. See
  // Heap::GetNumberStringCache.
  Label is_smi;
  Label load_result_from_cache;
  if (!object_is_smi) {
    __ JumpIfSmi(object, &is_smi);
    __ CheckMap(object, Factory::heap_number_map(), not_found, true);

    STATIC_ASSERT(8 == kDoubleSize);
    __ movl(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
    __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset));
    GenerateConvertHashCodeToIndex(masm, scratch, mask);

    Register index = scratch;
    Register probe = mask;
    __ movq(probe,
            FieldOperand(number_string_cache,
                         index,
                         times_1,
                         FixedArray::kHeaderSize));
    __ JumpIfSmi(probe, not_found);
    ASSERT(CpuFeatures::IsSupported(SSE2));
    CpuFeatures::Scope fscope(SSE2);
    __ movsd(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
    __ movsd(xmm1, FieldOperand(probe, HeapNumber::kValueOffset));
    __ ucomisd(xmm0, xmm1);
    __ j(parity_even, not_found); // Bail out if NaN is involved.
    __ j(not_equal, not_found); // The cache did not contain this value.
    __ jmp(&load_result_from_cache);
  }

  __ bind(&is_smi);
  __ SmiToInteger32(scratch, object);
  GenerateConvertHashCodeToIndex(masm, scratch, mask);

  Register index = scratch;
  // Check if the entry is the smi we are looking for.
  __ cmpq(object,
          FieldOperand(number_string_cache,
                       index,
                       times_1,
                       FixedArray::kHeaderSize));
  __ j(not_equal, not_found);

  // Get the result from the cache.
  __ bind(&load_result_from_cache);
  __ movq(result,
          FieldOperand(number_string_cache,
                       index,
                       times_1,
                       FixedArray::kHeaderSize + kPointerSize));
  __ IncrementCounter(&Counters::number_to_string_native, 1);
}


void NumberToStringStub::GenerateConvertHashCodeToIndex(MacroAssembler* masm,
                                                        Register hash,
                                                        Register mask) {
  __ and_(hash, mask);
  // Each entry in the string cache consists of two pointer-sized fields,
  // but the times_twice_pointer_size (multiplication by 16) scale factor
  // is not supported by addrmode on the x64 platform.
  // So we have to premultiply the entry index before lookup.
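  // e.g. with kPointerSizeLog2 == 3 on x64 the shift below is by 4 bits:
  // byte offset = (hash & mask) * 16, i.e. entry index times entry size.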
2934 __ shl(hash, Immediate(kPointerSizeLog2 + 1));
2935}
2936
2937
2938void NumberToStringStub::Generate(MacroAssembler* masm) {
2939 Label runtime;
2940
2941 __ movq(rbx, Operand(rsp, kPointerSize));
2942
2943 // Generate code to lookup number in the number string cache.
2944 GenerateLookupNumberStringCache(masm, rbx, rax, r8, r9, false, &runtime);
2945 __ ret(1 * kPointerSize);
2946
2947 __ bind(&runtime);
2948 // Handle number to string in the runtime system if not found in the cache.
2949 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);
2950}
2951
2952
2953static int NegativeComparisonResult(Condition cc) {
2954 ASSERT(cc != equal);
2955 ASSERT((cc == less) || (cc == less_equal)
2956 || (cc == greater) || (cc == greater_equal));
2957 return (cc == greater || cc == greater_equal) ? LESS : GREATER;
2958}


void CompareStub::Generate(MacroAssembler* masm) {
  ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));

  Label check_unequal_objects, done;

  // Compare two smis if required.
  if (include_smi_compare_) {
    Label non_smi, smi_done;
    __ JumpIfNotBothSmi(rax, rdx, &non_smi);
    __ subq(rdx, rax);
    __ j(no_overflow, &smi_done);
    __ not_(rdx);  // Correct sign in case of overflow. rdx cannot be 0 here.
    __ bind(&smi_done);
    __ movq(rax, rdx);
    __ ret(0);
    __ bind(&non_smi);
  } else if (FLAG_debug_code) {
    Label ok;
    __ JumpIfNotSmi(rdx, &ok);
    __ JumpIfNotSmi(rax, &ok);
    __ Abort("CompareStub: smi operands");
    __ bind(&ok);
  }
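
  // A portable C++ sketch of the smi fast case above (illustrative;
  // __builtin_sub_overflow assumes GCC/Clang): the tagged difference already
  // has the right sign unless the subtraction overflows, and flipping all
  // bits then restores the sign without ever producing zero.
  //
  //   #include <stdint.h>
  //
  //   static int64_t ExampleSmiCompare(int64_t lhs_tagged, int64_t rhs_tagged) {
  //     int64_t diff;
  //     if (!__builtin_sub_overflow(lhs_tagged, rhs_tagged, &diff)) return diff;
  //     return ~diff;  // Overflowed difference has the wrong sign; ~ fixes it.
  //   }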

  // The compare stub returns a positive, negative, or zero 64-bit integer
  // value in rax, corresponding to the result of comparing the two inputs.
  // NOTICE! This code is only reached after a smi-fast-case check, so
  // it is certain that at least one operand isn't a smi.

  // Two identical objects are equal unless they are both NaN or undefined.
  {
    NearLabel not_identical;
    __ cmpq(rax, rdx);
    __ j(not_equal, &not_identical);

    if (cc_ != equal) {
      // Check for undefined. undefined OP undefined is false even though
      // undefined == undefined.
      NearLabel check_for_nan;
      __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
      __ j(not_equal, &check_for_nan);
      __ Set(rax, NegativeComparisonResult(cc_));
      __ ret(0);
      __ bind(&check_for_nan);
    }

    // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
    // so we do the second best thing - test it ourselves.
    // Note: if cc_ != equal, never_nan_nan_ is not used.
    // We cannot set rax to EQUAL until just before return because
    // rax must be unchanged on jump to not_identical.

    if (never_nan_nan_ && (cc_ == equal)) {
      __ Set(rax, EQUAL);
      __ ret(0);
    } else {
      NearLabel heap_number;
      // If it's not a heap number, then return equal for (in)equality operator.
      __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
             Factory::heap_number_map());
      __ j(equal, &heap_number);
      if (cc_ != equal) {
        // Call runtime on identical JSObjects. Otherwise return equal.
        __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
        __ j(above_equal, &not_identical);
      }
      __ Set(rax, EQUAL);
      __ ret(0);

      __ bind(&heap_number);
      // It is a heap number, so return equal if it's not NaN.
      // For NaN, return 1 for every condition except greater and
      // greater-equal. Return -1 for them, so the comparison yields
      // false for all conditions except not-equal.
      __ Set(rax, EQUAL);
      __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
      __ ucomisd(xmm0, xmm0);
      __ setcc(parity_even, rax);
      // rax is 0 for equal non-NaN heap numbers, 1 for NaNs.
      if (cc_ == greater_equal || cc_ == greater) {
        __ neg(rax);
      }
      __ ret(0);
    }

    __ bind(&not_identical);
  }

  if (cc_ == equal) {  // Both strict and non-strict.
    Label slow;  // Fallthrough label.

    // If we're doing a strict equality comparison, we don't have to do
    // type conversion, so we generate code to do fast comparison for objects
    // and oddballs. Non-smi numbers and strings still go through the usual
    // slow-case code.
    if (strict_) {
      // If either is a Smi (we know that not both are), then they can only
      // be equal if the other is a HeapNumber. If so, use the slow case.
      {
        Label not_smis;
        __ SelectNonSmi(rbx, rax, rdx, &not_smis);

        // Check if the non-smi operand is a heap number.
        __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
               Factory::heap_number_map());
        // If heap number, handle it in the slow case.
        __ j(equal, &slow);
        // Return non-equal. ebx (the lower half of rbx) is not zero.
        __ movq(rax, rbx);
        __ ret(0);

        __ bind(&not_smis);
      }

      // If either operand is a JSObject or an oddball value, then they are
      // not equal since their pointers are different.
      // There is no test for undetectability in strict equality.

      // If the first object is a JS object, we have done pointer comparison.
      STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
      NearLabel first_non_object;
      __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
      __ j(below, &first_non_object);
      // Return non-zero (eax, the lower half of rax, is not zero).
      Label return_not_equal;
      STATIC_ASSERT(kHeapObjectTag != 0);
      __ bind(&return_not_equal);
      __ ret(0);

      __ bind(&first_non_object);
      // Check for oddballs: true, false, null, undefined.
      __ CmpInstanceType(rcx, ODDBALL_TYPE);
      __ j(equal, &return_not_equal);

      __ CmpObjectType(rdx, FIRST_JS_OBJECT_TYPE, rcx);
      __ j(above_equal, &return_not_equal);

      // Check for oddballs: true, false, null, undefined.
      __ CmpInstanceType(rcx, ODDBALL_TYPE);
      __ j(equal, &return_not_equal);

      // Fall through to the general case.
    }
    __ bind(&slow);
  }

  // Generate the number comparison code.
  if (include_number_compare_) {
    Label non_number_comparison;
    NearLabel unordered;
    FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison);
    __ xorl(rax, rax);
    __ xorl(rcx, rcx);
    __ ucomisd(xmm0, xmm1);

    // Don't base result on EFLAGS when a NaN is involved.
    __ j(parity_even, &unordered);
    // Return a result of -1, 0, or 1, based on EFLAGS.
    __ setcc(above, rax);
    __ setcc(below, rcx);
    __ subq(rax, rcx);
    __ ret(0);

    // If one of the numbers was NaN, then the result is always false.
    // The cc is never not-equal.
    __ bind(&unordered);
    ASSERT(cc_ != not_equal);
    if (cc_ == less || cc_ == less_equal) {
      __ Set(rax, 1);
    } else {
      __ Set(rax, -1);
    }
    __ ret(0);

    // The number comparison code did not provide a valid result.
    __ bind(&non_number_comparison);
  }
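
  // The SSE2 sequence above, restated as an illustrative C++ analogue:
  // ucomisd raises the parity flag for unordered (NaN) operands, and the two
  // setcc instructions build -1, 0, or 1 without branching.
  //
  //   static int ExampleNumberCompare(double lhs, double rhs) {
  //     if (lhs != lhs || rhs != rhs) {
  //       return -1;  // Unordered; 1 instead when cc_ is less/less_equal.
  //     }
  //     return (lhs > rhs) - (lhs < rhs);  // setcc(above) - setcc(below).
  //   }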

  // Fast negative check for symbol-to-symbol equality.
  Label check_for_strings;
  if (cc_ == equal) {
    BranchIfNonSymbol(masm, &check_for_strings, rax, kScratchRegister);
    BranchIfNonSymbol(masm, &check_for_strings, rdx, kScratchRegister);

    // We've already checked for object identity, so if both operands
    // are symbols they aren't equal. Register eax (not rax) already holds a
    // non-zero value, which indicates not equal, so just return.
    __ ret(0);
  }

  __ bind(&check_for_strings);

  __ JumpIfNotBothSequentialAsciiStrings(
      rdx, rax, rcx, rbx, &check_unequal_objects);

  // Inline comparison of ascii strings.
  StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
                                                     rdx,
                                                     rax,
                                                     rcx,
                                                     rbx,
                                                     rdi,
                                                     r8);

#ifdef DEBUG
  __ Abort("Unexpected fall-through from string comparison");
#endif

  __ bind(&check_unequal_objects);
  if (cc_ == equal && !strict_) {
    // Not strict equality. Objects are unequal if
    // they are both JSObjects and not undetectable,
    // and their pointers are different.
    NearLabel not_both_objects, return_unequal;
    // At most one is a smi, so we can test for smi by adding the two.
    // A smi plus a heap object has the low bit set, a heap object plus
    // a heap object has the low bit clear.
    STATIC_ASSERT(kSmiTag == 0);
    STATIC_ASSERT(kSmiTagMask == 1);
    __ lea(rcx, Operand(rax, rdx, times_1, 0));
    __ testb(rcx, Immediate(kSmiTagMask));
    __ j(not_zero, &not_both_objects);
    __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rbx);
    __ j(below, &not_both_objects);
    __ CmpObjectType(rdx, FIRST_JS_OBJECT_TYPE, rcx);
    __ j(below, &not_both_objects);
    __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    __ j(zero, &return_unequal);
    __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    __ j(zero, &return_unequal);
    // The objects are both undetectable, so they both compare as the value
    // undefined, and are equal.
    __ Set(rax, EQUAL);
    __ bind(&return_unequal);
    // Return non-equal by returning the non-zero object pointer in rax,
    // or return equal if we fell through to here.
    __ ret(0);
    __ bind(&not_both_objects);
  }
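
  // A short C++ sketch of the tag trick used above (illustrative; relies on
  // kSmiTag == 0 and kHeapObjectTag == 1, plus the precondition that at most
  // one operand is a smi): heap + heap sums to an even value, smi + heap to
  // an odd one, so a single low-bit test distinguishes the two cases.
  //
  //   static bool ExampleBothHeapObjects(uintptr_t a_tagged,
  //                                      uintptr_t b_tagged) {
  //     // Precondition: a_tagged and b_tagged are not both smis.
  //     return ((a_tagged + b_tagged) & 1) == 0;
  //   }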

  // Push arguments below the return address to prepare jump to builtin.
  __ pop(rcx);
  __ push(rdx);
  __ push(rax);

  // Figure out which native to call and set up the arguments.
  Builtins::JavaScript builtin;
  if (cc_ == equal) {
    builtin = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
  } else {
    builtin = Builtins::COMPARE;
    __ Push(Smi::FromInt(NegativeComparisonResult(cc_)));
  }

  // Restore return address on the stack.
  __ push(rcx);

  // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ InvokeBuiltin(builtin, JUMP_FUNCTION);
}


void CompareStub::BranchIfNonSymbol(MacroAssembler* masm,
                                    Label* label,
                                    Register object,
                                    Register scratch) {
  __ JumpIfSmi(object, label);
  __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset));
  __ movzxbq(scratch,
             FieldOperand(scratch, Map::kInstanceTypeOffset));
  // Ensure that no non-strings have the symbol bit set.
  STATIC_ASSERT(LAST_TYPE < kNotStringTag + kIsSymbolMask);
  STATIC_ASSERT(kSymbolTag != 0);
  __ testb(scratch, Immediate(kIsSymbolMask));
  __ j(zero, label);
}
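
// A brief C++ sketch of the single-bit symbol test (constants here are
// stand-ins for V8's instance-type bits): the STATIC_ASSERTs above guarantee
// that no non-string type carries the symbol bit, so one testb replaces a
// full range check.
//
//   static bool ExampleIsSymbol(unsigned instance_type,
//                               unsigned is_symbol_mask) {
//     // Assumes only string instance types can have the symbol bit set.
//     return (instance_type & is_symbol_mask) != 0;
//   }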


void StackCheckStub::Generate(MacroAssembler* masm) {
  __ TailCallRuntime(Runtime::kStackGuard, 0, 1);
}


void CallFunctionStub::Generate(MacroAssembler* masm) {
  Label slow;

  // If the receiver might be a value (string, number or boolean) check for
  // this and box it if it is.
  if (ReceiverMightBeValue()) {
    // Get the receiver from the stack.
    // +1 ~ return address
    Label receiver_is_value, receiver_is_js_object;
    __ movq(rax, Operand(rsp, (argc_ + 1) * kPointerSize));

    // Check if receiver is a smi (which is a number value).
    __ JumpIfSmi(rax, &receiver_is_value);

    // Check if the receiver is a valid JS object.
    __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rdi);
    __ j(above_equal, &receiver_is_js_object);

    // Call the runtime to box the value.
    __ bind(&receiver_is_value);
    __ EnterInternalFrame();
    __ push(rax);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ LeaveInternalFrame();
    __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rax);

    __ bind(&receiver_is_js_object);
  }

  // Get the function to call from the stack.
  // +2 ~ receiver, return address
  __ movq(rdi, Operand(rsp, (argc_ + 2) * kPointerSize));

  // Check that the function really is a JavaScript function.
  __ JumpIfSmi(rdi, &slow);
  // Go to the slow case if we do not have a function.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &slow);

  // Fast-case: Just invoke the function.
  ParameterCount actual(argc_);
  __ InvokeFunction(rdi, actual, JUMP_FUNCTION);

  // Slow-case: Non-function called.
  __ bind(&slow);
  // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
  // of the original receiver from the call site).
  __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rdi);
  __ Set(rax, argc_);
  __ Set(rbx, 0);
  __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
  Handle<Code> adaptor(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
  __ Jump(adaptor, RelocInfo::CODE_TARGET);
}


void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
  // Throw the exception in rax.
  __ Throw(rax);
}


void CEntryStub::GenerateCore(MacroAssembler* masm,
                              Label* throw_normal_exception,
                              Label* throw_termination_exception,
                              Label* throw_out_of_memory_exception,
                              bool do_gc,
                              bool always_allocate_scope) {
  // rax: result parameter for PerformGC, if any.
  // rbx: pointer to C function (C callee-saved).
  // rbp: frame pointer (restored after C call).
  // rsp: stack pointer (restored after C call).
  // r14: number of arguments including receiver (C callee-saved).
  // r12: pointer to the first argument (C callee-saved).
  //      This pointer is reused in LeaveExitFrame(), so it is stored in a
  //      callee-saved register.

  // Simple results returned in rax (both AMD64 and Win64 calling conventions).
  // Complex results must be written to the address passed as first argument.
  // AMD64 calling convention: a struct of two pointers in rax+rdx.

  // Check stack alignment.
  if (FLAG_debug_code) {
    __ CheckStackAlignment();
  }

  if (do_gc) {
    // Pass failure code returned from last attempt as first argument to
    // PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the
    // stack is known to be aligned. This function takes one argument which is
    // passed in a register.
#ifdef _WIN64
    __ movq(rcx, rax);
#else  // _WIN64
    __ movq(rdi, rax);
#endif
    __ movq(kScratchRegister,
            FUNCTION_ADDR(Runtime::PerformGC),
            RelocInfo::RUNTIME_ENTRY);
    __ call(kScratchRegister);
  }

  ExternalReference scope_depth =
      ExternalReference::heap_always_allocate_scope_depth();
  if (always_allocate_scope) {
    __ movq(kScratchRegister, scope_depth);
    __ incl(Operand(kScratchRegister, 0));
  }

  // Call C function.
#ifdef _WIN64
  // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9.
  // Store Arguments object on stack, below the 4 WIN64 ABI parameter slots.
  __ movq(StackSpaceOperand(0), r14);  // argc.
  __ movq(StackSpaceOperand(1), r12);  // argv.
  if (result_size_ < 2) {
    // Pass a pointer to the Arguments object as the first argument.
    // Return result in single register (rax).
    __ lea(rcx, StackSpaceOperand(0));
  } else {
    ASSERT_EQ(2, result_size_);
    // Pass a pointer to the result location as the first argument.
    __ lea(rcx, StackSpaceOperand(2));
    // Pass a pointer to the Arguments object as the second argument.
    __ lea(rdx, StackSpaceOperand(0));
  }

#else  // _WIN64
  // GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9.
  __ movq(rdi, r14);  // argc.
  __ movq(rsi, r12);  // argv.
#endif
  __ call(rbx);
  // Result is in rax - do not destroy this register!

  if (always_allocate_scope) {
    __ movq(kScratchRegister, scope_depth);
    __ decl(Operand(kScratchRegister, 0));
  }

  // Check for failure result.
  Label failure_returned;
  STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
#ifdef _WIN64
  // If return value is on the stack, pop it to registers.
  if (result_size_ > 1) {
    ASSERT_EQ(2, result_size_);
    // Read result values stored on stack. Result is stored
    // above the four argument mirror slots and the two
    // Arguments object slots.
    __ movq(rax, Operand(rsp, 6 * kPointerSize));
    __ movq(rdx, Operand(rsp, 7 * kPointerSize));
  }
#endif
  __ lea(rcx, Operand(rax, 1));
  // Lower 2 bits of rcx are 0 iff rax has failure tag.
  __ testl(rcx, Immediate(kFailureTagMask));
  __ j(zero, &failure_returned);

  // Exit the JavaScript to C++ exit frame.
  __ LeaveExitFrame(save_doubles_);
  __ ret(0);

  // Handling of failure.
  __ bind(&failure_returned);

  NearLabel retry;
  // If the returned exception is RETRY_AFTER_GC continue at the retry label.
  STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
  __ testl(rax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
  __ j(zero, &retry);

  // Special handling of out of memory exceptions.
  __ movq(kScratchRegister, Failure::OutOfMemoryException(), RelocInfo::NONE);
  __ cmpq(rax, kScratchRegister);
  __ j(equal, throw_out_of_memory_exception);

  // Retrieve the pending exception and clear the variable.
  ExternalReference pending_exception_address(Top::k_pending_exception_address);
  __ movq(kScratchRegister, pending_exception_address);
  __ movq(rax, Operand(kScratchRegister, 0));
  __ movq(rdx, ExternalReference::the_hole_value_location());
  __ movq(rdx, Operand(rdx, 0));
  __ movq(Operand(kScratchRegister, 0), rdx);

  // Special handling of termination exceptions, which are uncatchable
  // by javascript code.
  __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
  __ j(equal, throw_termination_exception);

  // Handle normal exception.
  __ jmp(throw_normal_exception);

  // Retry.
  __ bind(&retry);
}
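
// A compact C++ sketch of the failure check performed after the C call
// (kFailureTagMask is assumed to be 3, matching the STATIC_ASSERT above):
// failure objects are tagged 0b11 in the low bits, so adding 1 clears
// exactly those bits and a single test suffices.
//
//   static bool ExampleIsFailure(uintptr_t returned_word) {
//     const uintptr_t kAssumedFailureTagMask = 3;
//     return ((returned_word + 1) & kAssumedFailureTagMask) == 0;
//   }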


void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
                                          UncatchableExceptionType type) {
  __ ThrowUncatchable(type, rax);
}


void CEntryStub::Generate(MacroAssembler* masm) {
  // rax: number of arguments including receiver
  // rbx: pointer to C function (C callee-saved)
  // rbp: frame pointer of calling JS frame (restored after C call)
  // rsp: stack pointer (restored after C call)
  // rsi: current context (restored)

  // NOTE: Invocations of builtins may return failure objects
  // instead of a proper result. The builtin entry handles
  // this by performing a garbage collection and retrying the
  // builtin once.

  // Enter the exit frame that transitions from JavaScript to C++.
#ifdef _WIN64
  int arg_stack_space = (result_size_ < 2 ? 2 : 4);
#else
  int arg_stack_space = 0;
#endif
  __ EnterExitFrame(arg_stack_space, save_doubles_);

  // rax: Holds the context at this point, but should not be used.
  //      On entry to code generated by GenerateCore, it must hold
  //      a failure result if the collect_garbage argument to GenerateCore
  //      is true. This failure result can be the result of code
  //      generated by a previous call to GenerateCore. The value
  //      of rax is then passed to Runtime::PerformGC.
  // rbx: pointer to builtin function (C callee-saved).
  // rbp: frame pointer of exit frame (restored after C call).
  // rsp: stack pointer (restored after C call).
  // r14: number of arguments including receiver (C callee-saved).
  // r12: argv pointer (C callee-saved).

  Label throw_normal_exception;
  Label throw_termination_exception;
  Label throw_out_of_memory_exception;

  // Call into the runtime system.
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,
               false,
               false);

  // Do space-specific GC and retry runtime call.
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,
               true,
               false);

  // Do full GC and retry runtime call one final time.
  Failure* failure = Failure::InternalError();
  __ movq(rax, failure, RelocInfo::NONE);
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,
               true,
               true);

  __ bind(&throw_out_of_memory_exception);
  GenerateThrowUncatchable(masm, OUT_OF_MEMORY);

  __ bind(&throw_termination_exception);
  GenerateThrowUncatchable(masm, TERMINATION);

  __ bind(&throw_normal_exception);
  GenerateThrowTOS(masm);
}
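
// The retry ladder above as an illustrative C++ sketch (hypothetical
// helpers, not V8 API; reuses ExampleIsFailure from the sketch after
// GenerateCore): try once without GC, then after a space-specific GC, then
// after a full GC with always-allocate semantics.
//
//   static uintptr_t ExampleCallWithRetries(uintptr_t (*builtin)(),
//                                           void (*collect_garbage)(bool)) {
//     uintptr_t result = builtin();
//     if (!ExampleIsFailure(result)) return result;
//     collect_garbage(false);  // Space-specific GC.
//     result = builtin();
//     if (!ExampleIsFailure(result)) return result;
//     collect_garbage(true);   // Full GC; last attempt before throwing.
//     return builtin();
//   }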


void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
  Label invoke, exit;
#ifdef ENABLE_LOGGING_AND_PROFILING
  Label not_outermost_js, not_outermost_js_2;
#endif

  // Set up the frame.
  __ push(rbp);
  __ movq(rbp, rsp);

  // Push the stack frame type marker twice.
  int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
  // Scratch register is neither callee-save, nor an argument register on any
  // platform. It's free to use at this point.
  // Cannot use smi-register for loading yet.
  __ movq(kScratchRegister,
          reinterpret_cast<uint64_t>(Smi::FromInt(marker)),
          RelocInfo::NONE);
  __ push(kScratchRegister);  // context slot
  __ push(kScratchRegister);  // function slot
  // Save callee-saved registers (X64/Win64 calling conventions).
  __ push(r12);
  __ push(r13);
  __ push(r14);
  __ push(r15);
#ifdef _WIN64
  __ push(rdi);  // Only callee save in Win64 ABI, argument in AMD64 ABI.
  __ push(rsi);  // Only callee save in Win64 ABI, argument in AMD64 ABI.
#endif
  __ push(rbx);
  // TODO(X64): On Win64, if we ever use XMM6-XMM15, the low 64 bits are
  // callee save as well.

  // Save copies of the top frame descriptor on the stack.
  ExternalReference c_entry_fp(Top::k_c_entry_fp_address);
  __ load_rax(c_entry_fp);
  __ push(rax);

  // Set up the roots and smi constant registers.
  // Needs to be done before any further smi loads.
  __ InitializeRootRegister();
  __ InitializeSmiConstantRegister();

#ifdef ENABLE_LOGGING_AND_PROFILING
  // If this is the outermost JS call, set js_entry_sp value.
  ExternalReference js_entry_sp(Top::k_js_entry_sp_address);
  __ load_rax(js_entry_sp);
  __ testq(rax, rax);
  __ j(not_zero, &not_outermost_js);
  __ movq(rax, rbp);
  __ store_rax(js_entry_sp);
  __ bind(&not_outermost_js);
#endif

  // Call a faked try-block that does the invoke.
  __ call(&invoke);

  // Caught exception: Store result (exception) in the pending
  // exception field in the JSEnv and return a failure sentinel.
  ExternalReference pending_exception(Top::k_pending_exception_address);
  __ store_rax(pending_exception);
  __ movq(rax, Failure::Exception(), RelocInfo::NONE);
  __ jmp(&exit);

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);

  // Clear any pending exceptions.
  __ load_rax(ExternalReference::the_hole_value_location());
  __ store_rax(pending_exception);

  // Fake a receiver (NULL).
  __ push(Immediate(0));  // receiver

  // Invoke the function by calling through the JS entry trampoline
  // builtin and pop the faked function when we return. We load the address
  // from an external reference instead of inlining the call target address
  // directly in the code, because the builtin stubs may not have been
  // generated yet at the time this code is generated.
  if (is_construct) {
    ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline);
    __ load_rax(construct_entry);
  } else {
    ExternalReference entry(Builtins::JSEntryTrampoline);
    __ load_rax(entry);
  }
  __ lea(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
  __ call(kScratchRegister);

  // Unlink this frame from the handler chain.
  __ movq(kScratchRegister, ExternalReference(Top::k_handler_address));
  __ pop(Operand(kScratchRegister, 0));
  // Pop next_sp.
  __ addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));

#ifdef ENABLE_LOGGING_AND_PROFILING
  // If the current RBP value is the same as the js_entry_sp value, it means
  // that the current function is the outermost.
  __ movq(kScratchRegister, js_entry_sp);
  __ cmpq(rbp, Operand(kScratchRegister, 0));
  __ j(not_equal, &not_outermost_js_2);
  __ movq(Operand(kScratchRegister, 0), Immediate(0));
  __ bind(&not_outermost_js_2);
#endif

  // Restore the top frame descriptor from the stack.
  __ bind(&exit);
  __ movq(kScratchRegister, ExternalReference(Top::k_c_entry_fp_address));
  __ pop(Operand(kScratchRegister, 0));

  // Restore callee-saved registers (X64 conventions).
  __ pop(rbx);
#ifdef _WIN64
  // Callee saved in Win64 ABI, argument/volatile in AMD64 ABI.
  __ pop(rsi);
  __ pop(rdi);
#endif
  __ pop(r15);
  __ pop(r14);
  __ pop(r13);
  __ pop(r12);
  __ addq(rsp, Immediate(2 * kPointerSize));  // remove markers

  // Restore frame pointer and return.
  __ pop(rbp);
  __ ret(0);
}


void InstanceofStub::Generate(MacroAssembler* masm) {
  // Implements the "value instanceof function" operator.
  // Expected input state:
  //   rsp[0] : return address
  //   rsp[1] : function pointer
  //   rsp[2] : value
  // Returns a bitwise zero to indicate that the value
  // is an instance of the function and anything else to
  // indicate that the value is not an instance.

  // None of the flags are supported on X64.
  ASSERT(flags_ == kNoFlags);

  // Get the object - go slow case if it's a smi.
  Label slow;
  __ movq(rax, Operand(rsp, 2 * kPointerSize));
  __ JumpIfSmi(rax, &slow);

  // Check that the left hand is a JS object. Leave its map in rax.
  __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rax);
  __ j(below, &slow);
  __ CmpInstanceType(rax, LAST_JS_OBJECT_TYPE);
  __ j(above, &slow);

  // Get the prototype of the function.
  __ movq(rdx, Operand(rsp, 1 * kPointerSize));
  // rdx is function, rax is map.

  // Look up the function and the map in the instanceof cache.
  NearLabel miss;
  __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
  __ j(not_equal, &miss);
  __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex);
  __ j(not_equal, &miss);
  __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
  __ ret(2 * kPointerSize);

  __ bind(&miss);
  __ TryGetFunctionPrototype(rdx, rbx, &slow);

  // Check that the function prototype is a JS object.
  __ JumpIfSmi(rbx, &slow);
  __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, kScratchRegister);
  __ j(below, &slow);
  __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE);
  __ j(above, &slow);

  // Register mapping:
  //   rax is object map.
  //   rdx is function.
  //   rbx is function prototype.
  __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
  __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex);

  __ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset));

  // Loop through the prototype chain looking for the function prototype.
  NearLabel loop, is_instance, is_not_instance;
  __ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex);
  __ bind(&loop);
  __ cmpq(rcx, rbx);
  __ j(equal, &is_instance);
  __ cmpq(rcx, kScratchRegister);
  // The code at is_not_instance assumes that kScratchRegister contains a
  // non-zero GCable value (the null object in this case).
  __ j(equal, &is_not_instance);
  __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
  __ movq(rcx, FieldOperand(rcx, Map::kPrototypeOffset));
  __ jmp(&loop);

  __ bind(&is_instance);
  __ xorl(rax, rax);
  // Store bitwise zero in the cache. This is a Smi in GC terms.
  STATIC_ASSERT(kSmiTag == 0);
  __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
  __ ret(2 * kPointerSize);

  __ bind(&is_not_instance);
  // We have to store a non-zero value in the cache.
  __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex);
  __ ret(2 * kPointerSize);

  // Slow-case: Go through the JavaScript implementation.
  __ bind(&slow);
  __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
}
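
// The prototype walk above, sketched in C++ (hypothetical object layout,
// not V8's): follow the chain from the object's map prototype until either
// the function prototype (an instance) or null (not an instance) is found.
//
//   struct ExampleObject {
//     const ExampleObject* prototype;  // Chain is terminated by NULL.
//   };
//
//   static bool ExampleIsInstance(const ExampleObject* start,
//                                 const ExampleObject* function_prototype) {
//     for (const ExampleObject* p = start; p != NULL; p = p->prototype) {
//       if (p == function_prototype) return true;
//     }
//     return false;
//   }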


// Passing arguments in registers is not supported.
Register InstanceofStub::left() { return no_reg; }


Register InstanceofStub::right() { return no_reg; }


int CompareStub::MinorKey() {
  // Encode the parameters in a unique value. To avoid duplicate stubs the
  // never-NaN-NaN condition is only taken into account if the condition is
  // equals.
  ASSERT(static_cast<unsigned>(cc_) < (1 << 12));
  ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
  return ConditionField::encode(static_cast<unsigned>(cc_))
         | RegisterField::encode(false)  // lhs_ and rhs_ are not used
         | StrictField::encode(strict_)
         | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false)
         | IncludeNumberCompareField::encode(include_number_compare_)
         | IncludeSmiCompareField::encode(include_smi_compare_);
}
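
// An illustrative sketch of this kind of bit-field packing (the field
// widths below are made up; V8's BitField templates define the real layout):
//
//   static int ExampleMinorKey(unsigned cc, bool strict, bool never_nan_nan,
//                              bool include_number, bool include_smi) {
//     return static_cast<int>(cc)        // Low bits: the condition code.
//            | (strict << 12)
//            | (never_nan_nan << 13)
//            | (include_number << 14)
//            | (include_smi << 15);
//   }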


// Unfortunately you have to run without snapshots to see most of these
// names in the profile since most compare stubs end up in the snapshot.
const char* CompareStub::GetName() {
  ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));

  if (name_ != NULL) return name_;
  const int kMaxNameLength = 100;
  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
  if (name_ == NULL) return "OOM";

  const char* cc_name;
  switch (cc_) {
    case less: cc_name = "LT"; break;
    case greater: cc_name = "GT"; break;
    case less_equal: cc_name = "LE"; break;
    case greater_equal: cc_name = "GE"; break;
    case equal: cc_name = "EQ"; break;
    case not_equal: cc_name = "NE"; break;
    default: cc_name = "UnknownCondition"; break;
  }

  const char* strict_name = "";
  if (strict_ && (cc_ == equal || cc_ == not_equal)) {
    strict_name = "_STRICT";
  }

  const char* never_nan_nan_name = "";
  if (never_nan_nan_ && (cc_ == equal || cc_ == not_equal)) {
    never_nan_nan_name = "_NO_NAN";
  }

  const char* include_number_compare_name = "";
  if (!include_number_compare_) {
    include_number_compare_name = "_NO_NUMBER";
  }

  const char* include_smi_compare_name = "";
  if (!include_smi_compare_) {
    include_smi_compare_name = "_NO_SMI";
  }

  OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
               "CompareStub_%s%s%s%s%s",
               cc_name,
               strict_name,
               never_nan_nan_name,
               include_number_compare_name,
               include_smi_compare_name);
  return name_;
}


// -------------------------------------------------------------------------
// StringCharCodeAtGenerator

void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
  Label flat_string;
  Label ascii_string;
  Label got_char_code;

  // If the receiver is a smi trigger the non-string case.
  __ JumpIfSmi(object_, receiver_not_string_);

  // Fetch the instance type of the receiver into result register.
  __ movq(result_, FieldOperand(object_, HeapObject::kMapOffset));
  __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
  // If the receiver is not a string trigger the non-string case.
  __ testb(result_, Immediate(kIsNotStringMask));
  __ j(not_zero, receiver_not_string_);

  // If the index is non-smi trigger the non-smi case.
  __ JumpIfNotSmi(index_, &index_not_smi_);

  // Put smi-tagged index into scratch register.
  __ movq(scratch_, index_);
  __ bind(&got_smi_index_);

  // Check for index out of range.
  __ SmiCompare(scratch_, FieldOperand(object_, String::kLengthOffset));
  __ j(above_equal, index_out_of_range_);

  // We need special handling for non-flat strings.
  STATIC_ASSERT(kSeqStringTag == 0);
  __ testb(result_, Immediate(kStringRepresentationMask));
  __ j(zero, &flat_string);

  // Handle non-flat strings.
  __ testb(result_, Immediate(kIsConsStringMask));
  __ j(zero, &call_runtime_);

  // ConsString.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ CompareRoot(FieldOperand(object_, ConsString::kSecondOffset),
                 Heap::kEmptyStringRootIndex);
  __ j(not_equal, &call_runtime_);
  // Get the first of the two strings and load its instance type.
  __ movq(object_, FieldOperand(object_, ConsString::kFirstOffset));
  __ movq(result_, FieldOperand(object_, HeapObject::kMapOffset));
  __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
  // If the first cons component is also non-flat, then go to runtime.
  STATIC_ASSERT(kSeqStringTag == 0);
  __ testb(result_, Immediate(kStringRepresentationMask));
  __ j(not_zero, &call_runtime_);

  // Check for 1-byte or 2-byte string.
  __ bind(&flat_string);
  STATIC_ASSERT(kAsciiStringTag != 0);
  __ testb(result_, Immediate(kStringEncodingMask));
  __ j(not_zero, &ascii_string);

  // 2-byte string.
  // Load the 2-byte character code into the result register.
  __ SmiToInteger32(scratch_, scratch_);
  __ movzxwl(result_, FieldOperand(object_,
                                   scratch_, times_2,
                                   SeqTwoByteString::kHeaderSize));
  __ jmp(&got_char_code);

  // ASCII string.
  // Load the byte into the result register.
  __ bind(&ascii_string);
  __ SmiToInteger32(scratch_, scratch_);
  __ movzxbl(result_, FieldOperand(object_,
                                   scratch_, times_1,
                                   SeqAsciiString::kHeaderSize));
  __ bind(&got_char_code);
  __ Integer32ToSmi(result_, result_);
  __ bind(&exit_);
}
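
// The fast path above, as a C++ sketch (hypothetical string layout, not
// V8's): unwrap a cons string whose second half is empty, then index the
// flat payload according to its encoding.
//
//   #include <stdint.h>
//
//   struct ExampleString {
//     bool is_cons;
//     bool is_two_byte;
//     const ExampleString* first;  // Cons strings: left child.
//     const uint8_t* ascii;        // Flat one-byte payload.
//     const uint16_t* two_byte;    // Flat two-byte payload.
//     int length;
//   };
//
//   static int ExampleCharCodeAt(const ExampleString* s, int index) {
//     if (s->is_cons) s = s->first;  // Assumes the right half is empty.
//     if (index < 0 || index >= s->length) return -1;  // Out of range.
//     return s->is_two_byte ? s->two_byte[index] : s->ascii[index];
//   }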


void StringCharCodeAtGenerator::GenerateSlow(
    MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
  __ Abort("Unexpected fallthrough to CharCodeAt slow case");

  // Index is not a smi.
  __ bind(&index_not_smi_);
  // If index is a heap number, try converting it to an integer.
  __ CheckMap(index_, Factory::heap_number_map(), index_not_number_, true);
  call_helper.BeforeCall(masm);
  __ push(object_);
  __ push(index_);
  __ push(index_);  // Consumed by runtime conversion function.
  if (index_flags_ == STRING_INDEX_IS_NUMBER) {
    __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
  } else {
    ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
    // NumberToSmi discards numbers that are not exact integers.
    __ CallRuntime(Runtime::kNumberToSmi, 1);
  }
  if (!scratch_.is(rax)) {
    // Save the conversion result before the pop instructions below
    // have a chance to overwrite it.
    __ movq(scratch_, rax);
  }
  __ pop(index_);
  __ pop(object_);
  // Reload the instance type.
  __ movq(result_, FieldOperand(object_, HeapObject::kMapOffset));
  __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
  call_helper.AfterCall(masm);
  // If index is still not a smi, it must be out of range.
  __ JumpIfNotSmi(scratch_, index_out_of_range_);
  // Otherwise, return to the fast path.
  __ jmp(&got_smi_index_);

  // Call runtime. We get here when the receiver is a string and the
  // index is a number, but the code of getting the actual character
  // is too complex (e.g., when the string needs to be flattened).
  __ bind(&call_runtime_);
  call_helper.BeforeCall(masm);
  __ push(object_);
  __ push(index_);
  __ CallRuntime(Runtime::kStringCharCodeAt, 2);
  if (!result_.is(rax)) {
    __ movq(result_, rax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort("Unexpected fallthrough from CharCodeAt slow case");
}


// -------------------------------------------------------------------------
// StringCharFromCodeGenerator

void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
  // Fast case of Heap::LookupSingleCharacterStringFromCode.
  __ JumpIfNotSmi(code_, &slow_case_);
  __ SmiCompare(code_, Smi::FromInt(String::kMaxAsciiCharCode));
  __ j(above, &slow_case_);

  __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
  SmiIndex index = masm->SmiToIndex(kScratchRegister, code_, kPointerSizeLog2);
  __ movq(result_, FieldOperand(result_, index.reg, index.scale,
                                FixedArray::kHeaderSize));
  __ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
  __ j(equal, &slow_case_);
  __ bind(&exit_);
}


void StringCharFromCodeGenerator::GenerateSlow(
    MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
  __ Abort("Unexpected fallthrough to CharFromCode slow case");

  __ bind(&slow_case_);
  call_helper.BeforeCall(masm);
  __ push(code_);
  __ CallRuntime(Runtime::kCharFromCode, 1);
  if (!result_.is(rax)) {
    __ movq(result_, rax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort("Unexpected fallthrough from CharFromCode slow case");
}


// -------------------------------------------------------------------------
// StringCharAtGenerator

void StringCharAtGenerator::GenerateFast(MacroAssembler* masm) {
  char_code_at_generator_.GenerateFast(masm);
  char_from_code_generator_.GenerateFast(masm);
}


void StringCharAtGenerator::GenerateSlow(
    MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
  char_code_at_generator_.GenerateSlow(masm, call_helper);
  char_from_code_generator_.GenerateSlow(masm, call_helper);
}


void StringAddStub::Generate(MacroAssembler* masm) {
  Label string_add_runtime, call_builtin;
  Builtins::JavaScript builtin_id = Builtins::ADD;

  // Load the two arguments.
  __ movq(rax, Operand(rsp, 2 * kPointerSize));  // First argument (left).
  __ movq(rdx, Operand(rsp, 1 * kPointerSize));  // Second argument (right).

  // Make sure that both arguments are strings if not known in advance.
  if (flags_ == NO_STRING_ADD_FLAGS) {
    Condition is_smi;
    is_smi = masm->CheckSmi(rax);
    __ j(is_smi, &string_add_runtime);
    __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, r8);
    __ j(above_equal, &string_add_runtime);

    // First argument is a string; test the second.
    is_smi = masm->CheckSmi(rdx);
    __ j(is_smi, &string_add_runtime);
    __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, r9);
    __ j(above_equal, &string_add_runtime);
  } else {
    // Here at least one of the arguments is definitely a string.
    // We convert the one that is not known to be a string.
    if ((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) == 0) {
      ASSERT((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) != 0);
      GenerateConvertArgument(masm, 2 * kPointerSize, rax, rbx, rcx, rdi,
                              &call_builtin);
      builtin_id = Builtins::STRING_ADD_RIGHT;
    } else if ((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) == 0) {
      ASSERT((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) != 0);
      GenerateConvertArgument(masm, 1 * kPointerSize, rdx, rbx, rcx, rdi,
                              &call_builtin);
      builtin_id = Builtins::STRING_ADD_LEFT;
    }
  }

  // Both arguments are strings.
  // rax: first string
  // rdx: second string
  // Check if either of the strings is empty. In that case return the other.
  NearLabel second_not_zero_length, both_not_zero_length;
  __ movq(rcx, FieldOperand(rdx, String::kLengthOffset));
  __ SmiTest(rcx);
  __ j(not_zero, &second_not_zero_length);
  // Second string is empty, result is first string which is already in rax.
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);
  __ bind(&second_not_zero_length);
  __ movq(rbx, FieldOperand(rax, String::kLengthOffset));
  __ SmiTest(rbx);
  __ j(not_zero, &both_not_zero_length);
  // First string is empty, result is second string which is in rdx.
  __ movq(rax, rdx);
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);

  // Both strings are non-empty.
  // rax: first string
  // rbx: length of first string
  // rcx: length of second string
  // rdx: second string
  // r8: map of first string (if flags_ == NO_STRING_ADD_FLAGS)
  // r9: map of second string (if flags_ == NO_STRING_ADD_FLAGS)
  Label string_add_flat_result, longer_than_two;
  __ bind(&both_not_zero_length);

  // If arguments were known to be strings, maps are not loaded to r8 and r9
  // by the code above.
  if (flags_ != NO_STRING_ADD_FLAGS) {
    __ movq(r8, FieldOperand(rax, HeapObject::kMapOffset));
    __ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset));
  }
  // Get the instance types of the two strings as they will be needed soon.
  __ movzxbl(r8, FieldOperand(r8, Map::kInstanceTypeOffset));
  __ movzxbl(r9, FieldOperand(r9, Map::kInstanceTypeOffset));

  // Look at the length of the result of adding the two strings.
  STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue / 2);
  __ SmiAdd(rbx, rbx, rcx);
  // Adding two one-character strings is handled specially: probe the symbol
  // table for the combined two-character string first.
  __ SmiCompare(rbx, Smi::FromInt(2));
  __ j(not_equal, &longer_than_two);

  // Check that both strings are non-external ascii strings.
  __ JumpIfBothInstanceTypesAreNotSequentialAscii(r8, r9, rbx, rcx,
                                                  &string_add_runtime);

  // Get the two characters forming the substring.
  __ movzxbq(rbx, FieldOperand(rax, SeqAsciiString::kHeaderSize));
  __ movzxbq(rcx, FieldOperand(rdx, SeqAsciiString::kHeaderSize));

  // Try to look up the two character string in the symbol table. If it is
  // not found just allocate a new one.
  Label make_two_character_string, make_flat_ascii_string;
  StringHelper::GenerateTwoCharacterSymbolTableProbe(
      masm, rbx, rcx, r14, r11, rdi, r12, &make_two_character_string);
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);

  __ bind(&make_two_character_string);
  __ Set(rbx, 2);
  __ jmp(&make_flat_ascii_string);

  __ bind(&longer_than_two);
  // Check if resulting string will be flat.
  __ SmiCompare(rbx, Smi::FromInt(String::kMinNonFlatLength));
  __ j(below, &string_add_flat_result);
  // Handle exceptionally long strings in the runtime system.
  STATIC_ASSERT((String::kMaxLength & 0x80000000) == 0);
  __ SmiCompare(rbx, Smi::FromInt(String::kMaxLength));
  __ j(above, &string_add_runtime);

  // If result is not supposed to be flat, allocate a cons string object. If
  // both strings are ascii the result is an ascii cons string.
  // rax: first string
  // rbx: length of resulting flat string
  // rdx: second string
  // r8: instance type of first string
  // r9: instance type of second string
  Label non_ascii, allocated, ascii_data;
  __ movl(rcx, r8);
  __ and_(rcx, r9);
  STATIC_ASSERT(kStringEncodingMask == kAsciiStringTag);
  __ testl(rcx, Immediate(kAsciiStringTag));
  __ j(zero, &non_ascii);
  __ bind(&ascii_data);
  // Allocate an ascii cons string.
  __ AllocateAsciiConsString(rcx, rdi, no_reg, &string_add_runtime);
  __ bind(&allocated);
  // Fill the fields of the cons string.
  __ movq(FieldOperand(rcx, ConsString::kLengthOffset), rbx);
  __ movq(FieldOperand(rcx, ConsString::kHashFieldOffset),
          Immediate(String::kEmptyHashField));
  __ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax);
  __ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx);
  __ movq(rax, rcx);
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);
  __ bind(&non_ascii);
  // At least one of the strings is two-byte. Check whether it happens
  // to contain only ascii characters.
  // rcx: first instance type AND second instance type.
  // r8: first instance type.
  // r9: second instance type.
  __ testb(rcx, Immediate(kAsciiDataHintMask));
  __ j(not_zero, &ascii_data);
  __ xor_(r8, r9);
  STATIC_ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0);
  __ andb(r8, Immediate(kAsciiStringTag | kAsciiDataHintTag));
  __ cmpb(r8, Immediate(kAsciiStringTag | kAsciiDataHintTag));
  __ j(equal, &ascii_data);
  // Allocate a two byte cons string.
  __ AllocateConsString(rcx, rdi, no_reg, &string_add_runtime);
  __ jmp(&allocated);

  // Handle creating a flat result. First check that both strings are not
  // external strings.
  // rax: first string
  // rbx: length of resulting flat string as smi
  // rdx: second string
  // r8: instance type of first string
  // r9: instance type of second string
  __ bind(&string_add_flat_result);
  __ SmiToInteger32(rbx, rbx);
  __ movl(rcx, r8);
  __ and_(rcx, Immediate(kStringRepresentationMask));
  __ cmpl(rcx, Immediate(kExternalStringTag));
  __ j(equal, &string_add_runtime);
  __ movl(rcx, r9);
  __ and_(rcx, Immediate(kStringRepresentationMask));
  __ cmpl(rcx, Immediate(kExternalStringTag));
  __ j(equal, &string_add_runtime);
  // Now check if both strings are ascii strings.
  // rax: first string
  // rbx: length of resulting flat string
  // rdx: second string
  // r8: instance type of first string
  // r9: instance type of second string
  Label non_ascii_string_add_flat_result;
  STATIC_ASSERT(kStringEncodingMask == kAsciiStringTag);
  __ testl(r8, Immediate(kAsciiStringTag));
  __ j(zero, &non_ascii_string_add_flat_result);
  __ testl(r9, Immediate(kAsciiStringTag));
  __ j(zero, &string_add_runtime);

  __ bind(&make_flat_ascii_string);
  // Both strings are ascii strings. As they are short they are both flat.
  __ AllocateAsciiString(rcx, rbx, rdi, r14, r11, &string_add_runtime);
  // rcx: result string
  __ movq(rbx, rcx);
  // Locate first character of result.
  __ addq(rcx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // Locate first character of first argument.
  __ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset));
  __ addq(rax, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // rax: first char of first argument
  // rbx: result string
  // rcx: first character of result
  // rdx: second string
  // rdi: length of first argument
  StringHelper::GenerateCopyCharacters(masm, rcx, rax, rdi, true);
  // Locate first character of second argument.
  __ SmiToInteger32(rdi, FieldOperand(rdx, String::kLengthOffset));
  __ addq(rdx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // rbx: result string
  // rcx: next character of result
  // rdx: first char of second argument
  // rdi: length of second argument
  StringHelper::GenerateCopyCharacters(masm, rcx, rdx, rdi, true);
  __ movq(rax, rbx);
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);

  // Handle creating a flat two byte result.
  // rax: first string - known to be two byte
  // rbx: length of resulting flat string
  // rdx: second string
  // r8: instance type of first string
  // r9: instance type of second string
  __ bind(&non_ascii_string_add_flat_result);
  __ and_(r9, Immediate(kAsciiStringTag));
  __ j(not_zero, &string_add_runtime);
  // Both strings are two byte strings. As they are short they are both
  // flat.
  __ AllocateTwoByteString(rcx, rbx, rdi, r14, r11, &string_add_runtime);
  // rcx: result string
  __ movq(rbx, rcx);
  // Locate first character of result.
  __ addq(rcx, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // Locate first character of first argument.
  __ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset));
  __ addq(rax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // rax: first char of first argument
  // rbx: result string
  // rcx: first character of result
  // rdx: second argument
  // rdi: length of first argument
  StringHelper::GenerateCopyCharacters(masm, rcx, rax, rdi, false);
  // Locate first character of second argument.
  __ SmiToInteger32(rdi, FieldOperand(rdx, String::kLengthOffset));
  __ addq(rdx, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // rbx: result string
  // rcx: next character of result
  // rdx: first char of second argument
  // rdi: length of second argument
  StringHelper::GenerateCopyCharacters(masm, rcx, rdx, rdi, false);
  __ movq(rax, rbx);
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);

  // Just jump to runtime to add the two strings.
  __ bind(&string_add_runtime);
  __ TailCallRuntime(Runtime::kStringAdd, 2, 1);

  if (call_builtin.is_linked()) {
    __ bind(&call_builtin);
    __ InvokeBuiltin(builtin_id, JUMP_FUNCTION);
  }
}
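
// The overall shape of the stub, as an illustrative C++ sketch (thresholds
// and helpers are stand-ins, not V8 API): return the other operand for an
// empty string, consult the symbol table for two-character results, build a
// flat string for short results, and otherwise defer the copy with a cons
// string, falling back to the runtime for overlong inputs.
//
//   static ExampleString* ExampleStringAdd(ExampleString* a, ExampleString* b) {
//     if (Length(b) == 0) return a;
//     if (Length(a) == 0) return b;
//     int total = Length(a) + Length(b);
//     if (total == 2) return LookupOrMakeTwoCharString(a, b);
//     if (total < kExampleMinNonFlatLength) return MakeFlatCopy(a, b);
//     if (total > kExampleMaxLength) return RuntimeAdd(a, b);
//     return MakeConsString(a, b);  // Defer the actual character copy.
//   }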


void StringAddStub::GenerateConvertArgument(MacroAssembler* masm,
                                            int stack_offset,
                                            Register arg,
                                            Register scratch1,
                                            Register scratch2,
                                            Register scratch3,
                                            Label* slow) {
  // First check if the argument is already a string.
  Label not_string, done;
  __ JumpIfSmi(arg, &not_string);
  __ CmpObjectType(arg, FIRST_NONSTRING_TYPE, scratch1);
  __ j(below, &done);

  // Check the number to string cache.
  Label not_cached;
  __ bind(&not_string);
  // Puts the cached result into scratch1.
  NumberToStringStub::GenerateLookupNumberStringCache(masm,
                                                      arg,
                                                      scratch1,
                                                      scratch2,
                                                      scratch3,
                                                      false,
                                                      &not_cached);
  __ movq(arg, scratch1);
  __ movq(Operand(rsp, stack_offset), arg);
  __ jmp(&done);

  // Check if the argument is a safe string wrapper.
  __ bind(&not_cached);
  __ JumpIfSmi(arg, slow);
  __ CmpObjectType(arg, JS_VALUE_TYPE, scratch1);  // map -> scratch1.
  __ j(not_equal, slow);
  __ testb(FieldOperand(scratch1, Map::kBitField2Offset),
           Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
  __ j(zero, slow);
  __ movq(arg, FieldOperand(arg, JSValue::kValueOffset));
  __ movq(Operand(rsp, stack_offset), arg);

  __ bind(&done);
}


void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
                                          Register dest,
                                          Register src,
                                          Register count,
                                          bool ascii) {
  Label loop;
  __ bind(&loop);
  // This loop just copies one character at a time, as it is only used for very
  // short strings.
  if (ascii) {
    __ movb(kScratchRegister, Operand(src, 0));
    __ movb(Operand(dest, 0), kScratchRegister);
    __ incq(src);
    __ incq(dest);
  } else {
    __ movzxwl(kScratchRegister, Operand(src, 0));
    __ movw(Operand(dest, 0), kScratchRegister);
    __ addq(src, Immediate(2));
    __ addq(dest, Immediate(2));
  }
  __ decl(count);
  __ j(not_zero, &loop);
}


void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
                                             Register dest,
                                             Register src,
                                             Register count,
                                             bool ascii) {
  // Copy characters using rep movs of quadwords. Copy remaining characters
  // individually after running rep movs.
  // Count is a positive int32; dest and src are character pointers.
  ASSERT(dest.is(rdi));  // rep movs destination
  ASSERT(src.is(rsi));  // rep movs source
  ASSERT(count.is(rcx));  // rep movs count

  // Nothing to do for zero characters.
  NearLabel done;
  __ testl(count, count);
  __ j(zero, &done);

  // Make count the number of bytes to copy.
  if (!ascii) {
    STATIC_ASSERT(2 == sizeof(uc16));
    __ addl(count, count);
  }

  // Don't enter the rep movs if there are less than 8 bytes to copy.
  NearLabel last_bytes;
  __ testl(count, Immediate(~7));
  __ j(zero, &last_bytes);

  // Copy from rsi to rdi using the rep movs instruction.
  __ movl(kScratchRegister, count);
  __ shr(count, Immediate(3));  // Number of quadwords to copy.
  __ repmovsq();

  // Find number of bytes left.
  __ movl(count, kScratchRegister);
  __ and_(count, Immediate(7));

  // Check if there are more bytes to copy.
  __ bind(&last_bytes);
  __ testl(count, count);
  __ j(zero, &done);

  // Copy remaining characters.
  Label loop;
  __ bind(&loop);
  __ movb(kScratchRegister, Operand(src, 0));
  __ movb(Operand(dest, 0), kScratchRegister);
  __ incq(src);
  __ incq(dest);
  __ decl(count);
  __ j(not_zero, &loop);

  __ bind(&done);
}
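
// What GenerateCopyCharactersREP emits, restated as a C++ sketch (plain
// portable code; the stub itself uses rep movsq for the 8-byte chunks):
//
//   #include <stdint.h>
//   #include <string.h>
//
//   static void ExampleChunkedCopy(uint8_t* dest, const uint8_t* src,
//                                  uint32_t byte_count) {
//     uint32_t chunk = byte_count & ~7u;  // Whole quadwords (rep movsq part).
//     memcpy(dest, src, chunk);
//     for (uint32_t i = chunk; i < byte_count; i++) {
//       dest[i] = src[i];  // Byte tail, mirroring the trailing loop above.
//     }
//   }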

void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
                                                        Register c1,
                                                        Register c2,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3,
                                                        Register scratch4,
                                                        Label* not_found) {
  // Register scratch3 is the general scratch register in this function.
  Register scratch = scratch3;

  // Make sure that both characters are not digits, as such strings have a
  // different hash algorithm. Don't try to look for these in the symbol table.
  NearLabel not_array_index;
  __ leal(scratch, Operand(c1, -'0'));
  __ cmpl(scratch, Immediate(static_cast<int>('9' - '0')));
  __ j(above, &not_array_index);
  __ leal(scratch, Operand(c2, -'0'));
  __ cmpl(scratch, Immediate(static_cast<int>('9' - '0')));
  __ j(below_equal, not_found);

  __ bind(&not_array_index);
  // Calculate the two character string hash.
  Register hash = scratch1;
  GenerateHashInit(masm, hash, c1, scratch);
  GenerateHashAddCharacter(masm, hash, c2, scratch);
  GenerateHashGetHash(masm, hash, scratch);

  // Collect the two characters in a register.
  Register chars = c1;
  __ shl(c2, Immediate(kBitsPerByte));
  __ orl(chars, c2);

  // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
  // hash: hash of two character string.

  // Load the symbol table.
  Register symbol_table = c2;
  __ LoadRoot(symbol_table, Heap::kSymbolTableRootIndex);

  // Calculate capacity mask from the symbol table capacity.
  Register mask = scratch2;
  __ SmiToInteger32(mask,
                    FieldOperand(symbol_table, SymbolTable::kCapacityOffset));
  __ decl(mask);

  Register undefined = scratch4;
  __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);

  // Registers
  // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
  // hash: hash of two character string (32-bit int)
  // symbol_table: symbol table
  // mask: capacity mask (32-bit int)
  // undefined: undefined value
  // scratch: -

4448 // Perform a number of probes in the symbol table.
4449 static const int kProbes = 4;
4450 Label found_in_symbol_table;
4451 Label next_probe[kProbes];
4452 for (int i = 0; i < kProbes; i++) {
4453 // Calculate entry in symbol table.
4454 __ movl(scratch, hash);
4455 if (i > 0) {
4456 __ addl(scratch, Immediate(SymbolTable::GetProbeOffset(i)));
4457 }
4458 __ andl(scratch, mask);
4459
4460 // Load the entry from the symble table.
4461 Register candidate = scratch; // Scratch register contains candidate.
4462 STATIC_ASSERT(SymbolTable::kEntrySize == 1);
4463 __ movq(candidate,
4464 FieldOperand(symbol_table,
4465 scratch,
4466 times_pointer_size,
4467 SymbolTable::kElementsStartOffset));
4468
4469 // If entry is undefined no string with this hash can be found.
4470 __ cmpq(candidate, undefined);
4471 __ j(equal, not_found);
4472
4473 // If length is not 2 the string is not a candidate.
4474 __ SmiCompare(FieldOperand(candidate, String::kLengthOffset),
4475 Smi::FromInt(2));
4476 __ j(not_equal, &next_probe[i]);
4477
4478 // We use kScratchRegister as a temporary register in assumption that
4479 // JumpIfInstanceTypeIsNotSequentialAscii does not use it implicitly
4480 Register temp = kScratchRegister;
4481
4482 // Check that the candidate is a non-external ascii string.
4483 __ movq(temp, FieldOperand(candidate, HeapObject::kMapOffset));
4484 __ movzxbl(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
4485 __ JumpIfInstanceTypeIsNotSequentialAscii(
4486 temp, temp, &next_probe[i]);
4487
4488 // Check if the two characters match.
4489 __ movl(temp, FieldOperand(candidate, SeqAsciiString::kHeaderSize));
4490 __ andl(temp, Immediate(0x0000ffff));
4491 __ cmpl(chars, temp);
4492 __ j(equal, &found_in_symbol_table);
4493 __ bind(&next_probe[i]);
4494 }
4495
4496 // No matching 2 character string found by probing.
4497 __ jmp(not_found);
4498
4499 // Scratch register contains result when we fall through to here.
4500 Register result = scratch;
4501 __ bind(&found_in_symbol_table);
4502 if (!result.is(rax)) {
4503 __ movq(rax, result);
4504 }
4505}
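
// The probe loop above is open addressing over the symbol table, roughly
// this C++ sketch (illustrative only; probe_offset() stands in for
// SymbolTable::GetProbeOffset, with probe_offset(0) == 0, and the candidate
// checks are folded into MatchesTwoCharAsciiString):
//
//   Object* ProbeForTwoCharSymbol(uint32_t hash, uint16_t chars) {
//     uint32_t mask = capacity - 1;  // Capacity is a power of two.
//     for (int i = 0; i < 4; i++) {  // kProbes attempts, then give up.
//       uint32_t entry = (hash + probe_offset(i)) & mask;
//       Object* candidate = elements[entry];
//       if (candidate == undefined_value) return NULL;  // Definitely absent.
//       if (MatchesTwoCharAsciiString(candidate, chars)) return candidate;
//     }
//     return NULL;  // Fall through to not_found.
//   }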


void StringHelper::GenerateHashInit(MacroAssembler* masm,
                                    Register hash,
                                    Register character,
                                    Register scratch) {
  // hash = character + (character << 10);
  __ movl(hash, character);
  __ shll(hash, Immediate(10));
  __ addl(hash, character);
  // hash ^= hash >> 6;
  __ movl(scratch, hash);
  __ sarl(scratch, Immediate(6));
  __ xorl(hash, scratch);
}


void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
                                            Register hash,
                                            Register character,
                                            Register scratch) {
  // hash += character;
  __ addl(hash, character);
  // hash += hash << 10;
  __ movl(scratch, hash);
  __ shll(scratch, Immediate(10));
  __ addl(hash, scratch);
  // hash ^= hash >> 6;
  __ movl(scratch, hash);
  __ sarl(scratch, Immediate(6));
  __ xorl(hash, scratch);
}


void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
                                       Register hash,
                                       Register scratch) {
  // hash += hash << 3;
  __ leal(hash, Operand(hash, hash, times_8, 0));
  // hash ^= hash >> 11;
  __ movl(scratch, hash);
  __ sarl(scratch, Immediate(11));
  __ xorl(hash, scratch);
  // hash += hash << 15;
  __ movl(scratch, hash);
  __ shll(scratch, Immediate(15));
  __ addl(hash, scratch);

  // if (hash == 0) hash = 27;
  Label hash_not_zero;
  __ j(not_zero, &hash_not_zero);
  __ movl(hash, Immediate(27));
  __ bind(&hash_not_zero);
}
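
// For reference, the three helpers above together implement the following
// string hash (a C++ sketch, illustrative only; note the stubs use
// arithmetic right shifts (sarl) where this sketch shifts unsigned values,
// which can differ once intermediate sums set the sign bit):
//
//   uint32_t StringHash(const uint8_t* chars, int length) {
//     uint32_t hash = 0;
//     for (int i = 0; i < length; i++) {  // HashInit + HashAddCharacter.
//       hash += chars[i];
//       hash += hash << 10;
//       hash ^= hash >> 6;
//     }
//     hash += hash << 3;                  // HashGetHash finalization.
//     hash ^= hash >> 11;
//     hash += hash << 15;
//     return hash == 0 ? 27 : hash;       // Zero is reserved.
//   }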


void SubStringStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  //  rsp[0]: return address
  //  rsp[8]: to
  //  rsp[16]: from
  //  rsp[24]: string

  const int kToOffset = 1 * kPointerSize;
  const int kFromOffset = kToOffset + kPointerSize;
  const int kStringOffset = kFromOffset + kPointerSize;
  const int kArgumentsSize = (kStringOffset + kPointerSize) - kToOffset;

  // Make sure first argument is a string.
  __ movq(rax, Operand(rsp, kStringOffset));
  STATIC_ASSERT(kSmiTag == 0);
  __ testl(rax, Immediate(kSmiTagMask));
  __ j(zero, &runtime);
  Condition is_string = masm->IsObjectStringType(rax, rbx, rbx);
  __ j(NegateCondition(is_string), &runtime);

  // rax: string
  // rbx: instance type
  // Calculate length of sub string using the smi values.
  Label result_longer_than_two;
  __ movq(rcx, Operand(rsp, kToOffset));
  __ movq(rdx, Operand(rsp, kFromOffset));
  __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime);

  __ SmiSub(rcx, rcx, rdx);  // Overflow doesn't happen.
  __ cmpq(FieldOperand(rax, String::kLengthOffset), rcx);
  Label return_rax;
  __ j(equal, &return_rax);
  // Special handling of sub-strings of length 1 and 2. One character strings
  // are handled in the runtime system (looked up in the single character
  // cache). Two character strings are looked for in the symbol cache.
  __ SmiToInteger32(rcx, rcx);
  __ cmpl(rcx, Immediate(2));
  __ j(greater, &result_longer_than_two);
  __ j(less, &runtime);

  // Sub string of length 2 requested.
  // rax: string
  // rbx: instance type
  // rcx: sub string length (value is 2)
  // rdx: from index (smi)
  __ JumpIfInstanceTypeIsNotSequentialAscii(rbx, rbx, &runtime);

  // Get the two characters forming the sub string.
  __ SmiToInteger32(rdx, rdx);  // From index is no longer smi.
  __ movzxbq(rbx, FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize));
  __ movzxbq(rcx,
             FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize + 1));

  // Try to look up the two character string in the symbol table.
  Label make_two_character_string;
  StringHelper::GenerateTwoCharacterSymbolTableProbe(
      masm, rbx, rcx, rax, rdx, rdi, r14, &make_two_character_string);
  __ ret(3 * kPointerSize);

  __ bind(&make_two_character_string);
  // Set up registers for allocating the two character string.
  __ movq(rax, Operand(rsp, kStringOffset));
  __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
  __ Set(rcx, 2);

  __ bind(&result_longer_than_two);

  // rax: string
  // rbx: instance type
  // rcx: result string length
  // Check for a flat ascii string.
  Label non_ascii_flat;
  __ JumpIfInstanceTypeIsNotSequentialAscii(rbx, rbx, &non_ascii_flat);

  // Allocate the result.
  __ AllocateAsciiString(rax, rcx, rbx, rdx, rdi, &runtime);

  // rax: result string
  // rcx: result string length
  __ movq(rdx, rsi);  // rsi used by following code.
  // Locate first character of result.
  __ lea(rdi, FieldOperand(rax, SeqAsciiString::kHeaderSize));
  // Load string argument and locate character of sub string start.
  __ movq(rsi, Operand(rsp, kStringOffset));
  __ movq(rbx, Operand(rsp, kFromOffset));
  {
    SmiIndex smi_as_index = masm->SmiToIndex(rbx, rbx, times_1);
    __ lea(rsi, Operand(rsi, smi_as_index.reg, smi_as_index.scale,
                        SeqAsciiString::kHeaderSize - kHeapObjectTag));
  }

  // rax: result string
  // rcx: result length
  // rdx: original value of rsi
  // rdi: first character of result
  // rsi: character of sub string start
  StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, true);
  __ movq(rsi, rdx);  // Restore rsi.
  __ IncrementCounter(&Counters::sub_string_native, 1);
  __ ret(kArgumentsSize);

  __ bind(&non_ascii_flat);
  // rax: string
  // rbx: instance type & kStringRepresentationMask | kStringEncodingMask
  // rcx: result string length
  // Check for a sequential two byte string.
  __ cmpb(rbx, Immediate(kSeqStringTag | kTwoByteStringTag));
  __ j(not_equal, &runtime);

  // Allocate the result.
  __ AllocateTwoByteString(rax, rcx, rbx, rdx, rdi, &runtime);

  // rax: result string
  // rcx: result string length
  __ movq(rdx, rsi);  // rsi used by following code.
  // Locate first character of result.
  __ lea(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize));
  // Load string argument and locate character of sub string start.
  __ movq(rsi, Operand(rsp, kStringOffset));
  __ movq(rbx, Operand(rsp, kFromOffset));
  {
    SmiIndex smi_as_index = masm->SmiToIndex(rbx, rbx, times_2);
    __ lea(rsi, Operand(rsi, smi_as_index.reg, smi_as_index.scale,
                        SeqAsciiString::kHeaderSize - kHeapObjectTag));
  }

  // rax: result string
  // rcx: result length
  // rdx: original value of rsi
  // rdi: first character of result
  // rsi: character of sub string start
  StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, false);
  __ movq(rsi, rdx);  // Restore rsi.

  __ bind(&return_rax);
  __ IncrementCounter(&Counters::sub_string_native, 1);
  __ ret(kArgumentsSize);

  // Just jump to runtime to create the sub string.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kSubString, 3, 1);
}
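
// Fast/slow path selection in this stub, as a C++ sketch (illustrative only;
// Runtime_SubString, ProbeSymbolTable and AllocateAndCopy are stand-ins, not
// real V8 functions):
//
//   Object* SubString(String* str, int from, int to) {
//     int length = to - from;
//     if (length == str->length()) return str;          // return_rax
//     if (length < 2) return Runtime_SubString(str, from, to);
//     if (length == 2) {
//       Object* cached = ProbeSymbolTable(str, from);   // Two-char cache.
//       if (cached != NULL) return cached;
//     }
//     return AllocateAndCopy(str, from, length);        // rep movs copy.
//   }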


void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
                                                        Register left,
                                                        Register right,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3,
                                                        Register scratch4) {
  // Ensure that you can always subtract a string length from a non-negative
  // number (e.g. another length).
  STATIC_ASSERT(String::kMaxLength < 0x7fffffff);

  // Find minimum length and length difference.
  __ movq(scratch1, FieldOperand(left, String::kLengthOffset));
  __ movq(scratch4, scratch1);
  __ SmiSub(scratch4,
            scratch4,
            FieldOperand(right, String::kLengthOffset));
  // Register scratch4 now holds left.length - right.length.
  const Register length_difference = scratch4;
  NearLabel left_shorter;
  __ j(less, &left_shorter);
  // The right string isn't longer than the left one.
  // Get the right string's length by subtracting the (non-negative) difference
  // from the left string's length.
  __ SmiSub(scratch1, scratch1, length_difference);
  __ bind(&left_shorter);
  // Register scratch1 now holds Min(left.length, right.length).
  const Register min_length = scratch1;

  NearLabel compare_lengths;
  // If min-length is zero, go directly to comparing lengths.
  __ SmiTest(min_length);
  __ j(zero, &compare_lengths);

  __ SmiToInteger32(min_length, min_length);

  // Registers scratch2 and scratch3 are free.
  NearLabel result_not_equal;
  Label loop;
  {
    // Check characters 0 .. min_length - 1 in a loop.
    // Use scratch3 as loop index, min_length as limit and scratch2
    // for computation.
    const Register index = scratch3;
    __ movl(index, Immediate(0));  // Index into strings.
    __ bind(&loop);
    // Compare characters.
    // TODO(lrn): Could we load more than one character at a time?
    __ movb(scratch2, FieldOperand(left,
                                   index,
                                   times_1,
                                   SeqAsciiString::kHeaderSize));
    // Increment index and use -1 modifier on next load to give
    // the previous load extra time to complete.
    __ addl(index, Immediate(1));
    __ cmpb(scratch2, FieldOperand(right,
                                   index,
                                   times_1,
                                   SeqAsciiString::kHeaderSize - 1));
    __ j(not_equal, &result_not_equal);
    __ cmpl(index, min_length);
    __ j(not_equal, &loop);
  }
  // Completed loop without finding different characters.
  // Compare lengths (precomputed).
  __ bind(&compare_lengths);
  __ SmiTest(length_difference);
  __ j(not_zero, &result_not_equal);

  // Result is EQUAL.
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  NearLabel result_greater;
  __ bind(&result_not_equal);
  // Unequal comparison of left to right, either character or length.
  __ j(greater, &result_greater);

  // Result is LESS.
  __ Move(rax, Smi::FromInt(LESS));
  __ ret(0);

  // Result is GREATER.
  __ bind(&result_greater);
  __ Move(rax, Smi::FromInt(GREATER));
  __ ret(0);
}
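
// The comparison above is equivalent to this C++ sketch (illustrative only;
// -1/0/1 correspond to the LESS/EQUAL/GREATER smis returned by the stub):
//
//   int CompareFlatAscii(const uint8_t* l, int llen,
//                        const uint8_t* r, int rlen) {
//     int min_length = llen < rlen ? llen : rlen;
//     for (int i = 0; i < min_length; i++) {
//       if (l[i] != r[i]) return l[i] < r[i] ? -1 : 1;  // First difference.
//     }
//     if (llen == rlen) return 0;    // Same prefix, same length.
//     return llen < rlen ? -1 : 1;   // Same prefix: shorter string is less.
//   }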


void StringCompareStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  //  rsp[0]: return address
  //  rsp[8]: right string
  //  rsp[16]: left string

  __ movq(rdx, Operand(rsp, 2 * kPointerSize));  // left
  __ movq(rax, Operand(rsp, 1 * kPointerSize));  // right

  // Check for identity.
  NearLabel not_same;
  __ cmpq(rdx, rax);
  __ j(not_equal, &not_same);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ IncrementCounter(&Counters::string_compare_native, 1);
  __ ret(2 * kPointerSize);

  __ bind(&not_same);

  // Check that both are sequential ASCII strings.
  __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime);

  // Inline comparison of ascii strings.
  __ IncrementCounter(&Counters::string_compare_native, 1);
  // Drop arguments from the stack.
  __ pop(rcx);
  __ addq(rsp, Immediate(2 * kPointerSize));
  __ push(rcx);
  GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8);

  // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
}


void StringCharAtStub::Generate(MacroAssembler* masm) {
  // Expects two arguments (object, index) on the stack.

  // Stack frame on entry.
  //  rsp[0]: return address
  //  rsp[8]: index
  //  rsp[16]: object

  Register object = rbx;
  Register index = rax;
  Register scratch1 = rcx;
  Register scratch2 = rdx;
  Register result = rax;

  __ pop(scratch1);  // Return address.
  __ pop(index);
  __ pop(object);
  __ push(scratch1);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch1,
                                  scratch2,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ Move(result, Factory::empty_string());
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ Move(result, Smi::FromInt(0));
  __ jmp(&done);

  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm, call_helper);

  __ bind(&done);
  __ ret(0);
}


void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::SMIS);
  NearLabel miss;
  __ JumpIfNotBothSmi(rdx, rax, &miss);

  if (GetCondition() == equal) {
    // For equality we do not care about the sign of the result.
    __ subq(rax, rdx);
  } else {
    NearLabel done;
    __ subq(rdx, rax);
    __ j(no_overflow, &done);
    // Correct sign of result in case of overflow.
    __ SmiNot(rdx, rdx);
    __ bind(&done);
    __ movq(rax, rdx);
  }
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}
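
// A C++ sketch of the non-equality fast path above (illustrative only):
// two tagged smis can be compared by subtracting their raw words; if the
// subtraction overflows, a bitwise NOT flips the sign back, and only the
// sign of the result matters to the caller.
//
//   int64_t CompareSmiWords(int64_t left_raw, int64_t right_raw) {
//     int64_t diff;
//     if (__builtin_sub_overflow(left_raw, right_raw, &diff)) {
//       diff = ~diff;  // Overflowed: ~diff has the correct sign.
//     }
//     return diff;  // Negative, zero or positive, like rax on return.
//   }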


void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::HEAP_NUMBERS);

  NearLabel generic_stub;
  NearLabel unordered;
  NearLabel miss;
  Condition either_smi = masm->CheckEitherSmi(rax, rdx);
  __ j(either_smi, &generic_stub);

  __ CmpObjectType(rax, HEAP_NUMBER_TYPE, rcx);
  __ j(not_equal, &miss);
  __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
  __ j(not_equal, &miss);

  // Load left and right operands.
  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));

  // Compare operands.
  __ ucomisd(xmm0, xmm1);

  // Don't base result on EFLAGS when a NaN is involved.
  __ j(parity_even, &unordered);

  // Return a result of -1, 0, or 1, based on EFLAGS.
  // Performing mov, because xor would destroy the flag register.
  __ movl(rax, Immediate(0));
  __ movl(rcx, Immediate(0));
  __ setcc(above, rax);  // Set rax to 1 if above (carry clear and not equal).
  __ sbbq(rax, rcx);  // Subtract one if below (i.e. carry set).
  __ ret(0);

  __ bind(&unordered);

  CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
  __ bind(&generic_stub);
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);

  __ bind(&miss);
  GenerateMiss(masm);
}
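
// The movl/setcc/sbbq sequence above encodes the ucomisd flags as -1, 0 or 1
// without branches. In effect (C++ sketch, illustrative only):
//
//   int CompareDoubles(double left, double right) {
//     // NaN operands never reach here; parity_even jumps to &unordered.
//     if (left > right) return 1;   // setcc(above) sets rax to 1.
//     if (left < right) return -1;  // sbbq subtracts the borrow: 0 - 1.
//     return 0;                     // Equal: rax stays 0.
//   }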


void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::OBJECTS);
  NearLabel miss;
  Condition either_smi = masm->CheckEitherSmi(rdx, rax);
  __ j(either_smi, &miss);

  __ CmpObjectType(rax, JS_OBJECT_TYPE, rcx);
  __ j(not_equal, &miss, not_taken);
  __ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx);
  __ j(not_equal, &miss, not_taken);

  ASSERT(GetCondition() == equal);
  __ subq(rax, rdx);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
  // Save the registers.
  __ pop(rcx);
  __ push(rdx);
  __ push(rax);
  __ push(rcx);

  // Call the runtime system in a fresh internal frame.
  ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss));
  __ EnterInternalFrame();
  __ push(rdx);
  __ push(rax);
  __ Push(Smi::FromInt(op_));
  __ CallExternalReference(miss, 3);
  __ LeaveInternalFrame();

  // Compute the entry point of the rewritten stub.
  __ lea(rdi, FieldOperand(rax, Code::kHeaderSize));

  // Restore registers.
  __ pop(rcx);
  __ pop(rax);
  __ pop(rdx);
  __ push(rcx);

  // Do a tail call to the rewritten stub.
  __ jmp(rdi);
}


void GenerateFastPixelArrayLoad(MacroAssembler* masm,
                                Register receiver,
                                Register key,
                                Register elements,
                                Register untagged_key,
                                Register result,
                                Label* not_pixel_array,
                                Label* key_not_smi,
                                Label* out_of_range) {
  // Register use:
  // receiver - holds the receiver and is unchanged.
  // key - holds the key and is unchanged (must be a smi).
  // elements - is set to the receiver's elements if
  //            the receiver doesn't have a pixel array or the
  //            key is not a smi, otherwise it's the elements'
  //            external pointer.
  // untagged_key - is set to the untagged key.

  // Some callers already have verified that the key is a smi. key_not_smi is
  // set to NULL as a sentinel for that case. Otherwise, an explicit check
  // to ensure the key is a smi must be added.
  if (key_not_smi != NULL) {
    __ JumpIfNotSmi(key, key_not_smi);
  } else {
    if (FLAG_debug_code) {
      __ AbortIfNotSmi(key);
    }
  }
  __ SmiToInteger32(untagged_key, key);

  __ movq(elements, FieldOperand(receiver, JSObject::kElementsOffset));
  // By passing NULL as not_pixel_array, callers signal that they have already
  // verified that the receiver has pixel array elements.
  if (not_pixel_array != NULL) {
    __ CheckMap(elements, Factory::pixel_array_map(), not_pixel_array, true);
  } else {
    if (FLAG_debug_code) {
      // Map check should have already made sure that elements is a pixel
      // array.
      __ Cmp(FieldOperand(elements, HeapObject::kMapOffset),
             Factory::pixel_array_map());
      __ Assert(equal, "Elements isn't a pixel array");
    }
  }

  // Check that the smi is in range.
  __ cmpl(untagged_key, FieldOperand(elements, PixelArray::kLengthOffset));
  __ j(above_equal, out_of_range);  // Unsigned check handles negative keys.

  // Load and tag the element as a smi.
  __ movq(elements, FieldOperand(elements, PixelArray::kExternalPointerOffset));
  __ movzxbq(result, Operand(elements, untagged_key, times_1, 0));
  __ Integer32ToSmi(result, result);
  __ ret(0);
}
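
// A C++ sketch of the fast path above (illustrative only; the accessor names
// are stand-ins). Note how a single unsigned comparison rejects both negative
// and too-large keys:
//
//   bool FastPixelLoad(PixelArray* array, int32_t key, int32_t* result) {
//     if (static_cast<uint32_t>(key) >=
//         static_cast<uint32_t>(array->length())) {
//       return false;  // out_of_range
//     }
//     *result = array->external_pointer()[key];  // uint8_t, so 0..255.
//     return true;
//   }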


// Stores an indexed element into a pixel array, clamping the stored value.
void GenerateFastPixelArrayStore(MacroAssembler* masm,
                                 Register receiver,
                                 Register key,
                                 Register value,
                                 Register elements,
                                 Register scratch1,
                                 bool load_elements_from_receiver,
                                 bool key_is_untagged,
                                 Label* key_not_smi,
                                 Label* value_not_smi,
                                 Label* not_pixel_array,
                                 Label* out_of_range) {
  // Register use:
  // receiver - holds the receiver and is unchanged.
  // key - holds the key (must be a smi) and is unchanged.
  // value - holds the value (must be a smi) and is unchanged.
  // elements - holds the element object of the receiver on entry if
  //            load_elements_from_receiver is false, otherwise used
  //            internally to store the pixel array's elements and
  //            external array pointer.

  Register external_pointer = elements;
  Register untagged_key = scratch1;
  Register untagged_value = receiver;  // Only set once success is guaranteed.

  // Fetch the receiver's elements if the caller hasn't already done so.
  if (load_elements_from_receiver) {
    __ movq(elements, FieldOperand(receiver, JSObject::kElementsOffset));
  }

  // By passing NULL as not_pixel_array, callers signal that they have already
  // verified that the receiver has pixel array elements.
  if (not_pixel_array != NULL) {
    __ CheckMap(elements, Factory::pixel_array_map(), not_pixel_array, true);
  } else {
    if (FLAG_debug_code) {
      // Map check should have already made sure that elements is a pixel
      // array.
      __ Cmp(FieldOperand(elements, HeapObject::kMapOffset),
             Factory::pixel_array_map());
      __ Assert(equal, "Elements isn't a pixel array");
    }
  }

  // Key must be a smi and it must be in range.
  if (key_is_untagged) {
    untagged_key = key;
  } else {
    // Some callers already have verified that the key is a smi. key_not_smi
    // is set to NULL as a sentinel for that case. Otherwise, an explicit
    // check to ensure the key is a smi must be added.
    if (key_not_smi != NULL) {
      __ JumpIfNotSmi(key, key_not_smi);
    } else {
      if (FLAG_debug_code) {
        __ AbortIfNotSmi(key);
      }
    }
    __ SmiToInteger32(untagged_key, key);
  }
  __ cmpl(untagged_key, FieldOperand(elements, PixelArray::kLengthOffset));
  __ j(above_equal, out_of_range);  // Unsigned check handles negative keys.

  // Value must be a smi.
  __ JumpIfNotSmi(value, value_not_smi);
  __ SmiToInteger32(untagged_value, value);

  {  // Clamp the value to [0..255].
    NearLabel done;
    __ testl(untagged_value, Immediate(0xFFFFFF00));
    __ j(zero, &done);
    __ setcc(negative, untagged_value);  // 1 if negative, 0 if positive.
    __ decb(untagged_value);  // 0 if negative, 255 if positive.
    __ bind(&done);
  }
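
  // The branchless clamp above behaves like this C++ sketch (illustrative
  // only):
  //
  //   if (value & 0xFFFFFF00) {         // Outside [0..255]?
  //     value = (value < 0) ? 0 : 255;  // setcc + decb compute this.
  //   }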

  __ movq(external_pointer,
          FieldOperand(elements, PixelArray::kExternalPointerOffset));
  __ movb(Operand(external_pointer, untagged_key, times_1, 0), untagged_value);
  __ ret(0);  // Return value in rax.
}

#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64