// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "bootstrapper.h"
#include "code-stubs.h"
#include "regexp-macro-assembler.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)
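// ACCESS_MASM expands to "masm->", so each "__ mnemonic(...)" line below is
// an ordinary MacroAssembler call that reads like bare assembly.
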
void FastNewClosureStub::Generate(MacroAssembler* masm) {
  // Create a new closure from the given function info in new
  // space. Set the context to the current context in rsi.
  Label gc;
  __ AllocateInNewSpace(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT);

  // Get the function info from the stack.
  __ movq(rdx, Operand(rsp, 1 * kPointerSize));

  // Compute the function map in the current global context and set that
  // as the map of the allocated object.
  __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset));
  __ movq(rcx, Operand(rcx, Context::SlotOffset(Context::FUNCTION_MAP_INDEX)));
  __ movq(FieldOperand(rax, JSObject::kMapOffset), rcx);

  // Initialize the rest of the function. We don't have to update the
  // write barrier because the allocated object is in new space.
  __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
  __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex);
  __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
  __ movq(FieldOperand(rax, JSObject::kElementsOffset), rbx);
  __ movq(FieldOperand(rax, JSFunction::kPrototypeOrInitialMapOffset), rcx);
  __ movq(FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset), rdx);
  __ movq(FieldOperand(rax, JSFunction::kContextOffset), rsi);
  __ movq(FieldOperand(rax, JSFunction::kLiteralsOffset), rbx);

  // Initialize the code pointer in the function to be the one
  // found in the shared function info object.
  __ movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
  __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
  __ movq(FieldOperand(rax, JSFunction::kCodeEntryOffset), rdx);

  // Return and remove the on-stack parameter.
  __ ret(1 * kPointerSize);

  // Create a new closure through the slower runtime call.
  __ bind(&gc);
  __ pop(rcx);  // Temporarily remove return address.
  __ pop(rdx);
  __ push(rsi);
  __ push(rdx);
  __ Push(Factory::false_value());
  __ push(rcx);  // Restore return address.
  __ TailCallRuntime(Runtime::kNewClosure, 3, 1);
}


void FastNewContextStub::Generate(MacroAssembler* masm) {
  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
  __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize,
                        rax, rbx, rcx, &gc, TAG_OBJECT);

  // Get the function from the stack.
  __ movq(rcx, Operand(rsp, 1 * kPointerSize));

  // Setup the object header.
  __ LoadRoot(kScratchRegister, Heap::kContextMapRootIndex);
  __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
  __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));

  // Setup the fixed slots.
  __ xor_(rbx, rbx);  // Set to NULL.
  __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx);
  __ movq(Operand(rax, Context::SlotOffset(Context::FCONTEXT_INDEX)), rax);
  __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rbx);
  __ movq(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx);

  // Copy the global object from the surrounding context.
  __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_INDEX)), rbx);

  // Initialize the rest of the slots to undefined.
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
    __ movq(Operand(rax, Context::SlotOffset(i)), rbx);
  }

  // Return and remove the on-stack parameter.
  __ movq(rsi, rax);
  __ ret(1 * kPointerSize);

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kNewContext, 1, 1);
}


void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [rsp + kPointerSize]: constant elements.
  // [rsp + (2 * kPointerSize)]: literal index.
  // [rsp + (3 * kPointerSize)]: literals array.

  // All sizes here are multiples of kPointerSize.
  int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
  int size = JSArray::kSize + elements_size;

  // Load boilerplate object into rcx and check if we need to create a
  // boilerplate.
  Label slow_case;
  __ movq(rcx, Operand(rsp, 3 * kPointerSize));
  __ movq(rax, Operand(rsp, 2 * kPointerSize));
  SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
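  // SmiToIndex untags the smi literal index and yields a register/scale pair
  // that can be used directly in the memory operand below to address the
  // corresponding slot of the literals FixedArray.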
  __ movq(rcx,
          FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize));
  __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
  __ j(equal, &slow_case);

  if (FLAG_debug_code) {
    const char* message;
    Heap::RootListIndex expected_map_index;
    if (mode_ == CLONE_ELEMENTS) {
      message = "Expected (writable) fixed array";
      expected_map_index = Heap::kFixedArrayMapRootIndex;
    } else {
      ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS);
      message = "Expected copy-on-write fixed array";
      expected_map_index = Heap::kFixedCOWArrayMapRootIndex;
    }
    __ push(rcx);
    __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
    __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
                   expected_map_index);
    __ Assert(equal, message);
    __ pop(rcx);
  }

  // Allocate both the JS array and the elements array in one big
  // allocation. This avoids multiple limit checks.
  __ AllocateInNewSpace(size, rax, rbx, rdx, &slow_case, TAG_OBJECT);

  // Copy the JS array part.
  for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
    if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
      __ movq(rbx, FieldOperand(rcx, i));
      __ movq(FieldOperand(rax, i), rbx);
    }
  }

  if (length_ > 0) {
    // Get hold of the elements array of the boilerplate and setup the
    // elements pointer in the resulting object.
    __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
    __ lea(rdx, Operand(rax, JSArray::kSize));
    __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx);

    // Copy the elements array.
    for (int i = 0; i < elements_size; i += kPointerSize) {
      __ movq(rbx, FieldOperand(rcx, i));
      __ movq(FieldOperand(rdx, i), rbx);
    }
  }

  // Return and remove the on-stack parameters.
  __ ret(3 * kPointerSize);

  __ bind(&slow_case);
  __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
}


void ToBooleanStub::Generate(MacroAssembler* masm) {
  NearLabel false_result, true_result, not_string;
  __ movq(rax, Operand(rsp, 1 * kPointerSize));

  // 'null' => false.
  __ CompareRoot(rax, Heap::kNullValueRootIndex);
  __ j(equal, &false_result);

  // Get the map and type of the heap object.
  // We don't use CmpObjectType because we manipulate the type field.
  __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
  __ movzxbq(rcx, FieldOperand(rdx, Map::kInstanceTypeOffset));

  // Undetectable => false.
  __ movzxbq(rbx, FieldOperand(rdx, Map::kBitFieldOffset));
  __ and_(rbx, Immediate(1 << Map::kIsUndetectable));
  __ j(not_zero, &false_result);

  // JavaScript object => true.
  __ cmpq(rcx, Immediate(FIRST_JS_OBJECT_TYPE));
  __ j(above_equal, &true_result);

  // String value => false iff empty.
  __ cmpq(rcx, Immediate(FIRST_NONSTRING_TYPE));
  __ j(above_equal, &not_string);
  __ movq(rdx, FieldOperand(rax, String::kLengthOffset));
  __ SmiTest(rdx);
  __ j(zero, &false_result);
  __ jmp(&true_result);

  __ bind(&not_string);
  __ CompareRoot(rdx, Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &true_result);
  // HeapNumber => false iff +0, -0, or NaN.
  // These three cases set the zero flag when compared to zero using ucomisd.
  __ xorpd(xmm0, xmm0);
  __ ucomisd(xmm0, FieldOperand(rax, HeapNumber::kValueOffset));
  __ j(zero, &false_result);
  // Fall through to |true_result|.

  // Return 1/0 for true/false in rax.
  __ bind(&true_result);
  __ movq(rax, Immediate(1));
  __ ret(1 * kPointerSize);
  __ bind(&false_result);
  __ xor_(rax, rax);
  __ ret(1 * kPointerSize);
}


const char* GenericBinaryOpStub::GetName() {
  if (name_ != NULL) return name_;
  const int kMaxNameLength = 100;
  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
  if (name_ == NULL) return "OOM";
  const char* op_name = Token::Name(op_);
  const char* overwrite_name;
  switch (mode_) {
    case NO_OVERWRITE: overwrite_name = "Alloc"; break;
    case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
    case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
    default: overwrite_name = "UnknownOverwrite"; break;
  }

  OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
               "GenericBinaryOpStub_%s_%s%s_%s%s_%s_%s",
               op_name,
               overwrite_name,
               (flags_ & NO_SMI_CODE_IN_STUB) ? "_NoSmiInStub" : "",
               args_in_registers_ ? "RegArgs" : "StackArgs",
               args_reversed_ ? "_R" : "",
               static_operands_type_.ToString(),
               BinaryOpIC::GetName(runtime_operands_type_));
  return name_;
}


void GenericBinaryOpStub::GenerateCall(
    MacroAssembler* masm,
    Register left,
    Register right) {
  if (!ArgsInRegistersSupported()) {
    // Pass arguments on the stack.
    __ push(left);
    __ push(right);
  } else {
    // The calling convention with registers is left in rdx and right in rax.
    Register left_arg = rdx;
    Register right_arg = rax;
    if (!(left.is(left_arg) && right.is(right_arg))) {
      if (left.is(right_arg) && right.is(left_arg)) {
        if (IsOperationCommutative()) {
          SetArgsReversed();
        } else {
          __ xchg(left, right);
        }
      } else if (left.is(left_arg)) {
        __ movq(right_arg, right);
      } else if (right.is(right_arg)) {
        __ movq(left_arg, left);
      } else if (left.is(right_arg)) {
        if (IsOperationCommutative()) {
          __ movq(left_arg, right);
          SetArgsReversed();
        } else {
          // Order of moves important to avoid destroying left argument.
          __ movq(left_arg, left);
          __ movq(right_arg, right);
        }
      } else if (right.is(left_arg)) {
        if (IsOperationCommutative()) {
          __ movq(right_arg, left);
          SetArgsReversed();
        } else {
          // Order of moves important to avoid destroying right argument.
          __ movq(right_arg, right);
          __ movq(left_arg, left);
        }
      } else {
        // Order of moves is not important.
        __ movq(left_arg, left);
        __ movq(right_arg, right);
      }
    }

    // Update flags to indicate that arguments are in registers.
    SetArgsInRegisters();
    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
  }

  // Call the stub.
  __ CallStub(this);
}


void GenericBinaryOpStub::GenerateCall(
    MacroAssembler* masm,
    Register left,
    Smi* right) {
  if (!ArgsInRegistersSupported()) {
    // Pass arguments on the stack.
    __ push(left);
    __ Push(right);
  } else {
    // The calling convention with registers is left in rdx and right in rax.
    Register left_arg = rdx;
    Register right_arg = rax;
    if (left.is(left_arg)) {
      __ Move(right_arg, right);
    } else if (left.is(right_arg) && IsOperationCommutative()) {
      __ Move(left_arg, right);
      SetArgsReversed();
    } else {
      // For non-commutative operations, left and right_arg might be
      // the same register. Therefore, the order of the moves is
      // important here in order to not overwrite left before moving
      // it to left_arg.
      __ movq(left_arg, left);
      __ Move(right_arg, right);
    }

    // Update flags to indicate that arguments are in registers.
    SetArgsInRegisters();
    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
  }

  // Call the stub.
  __ CallStub(this);
}


void GenericBinaryOpStub::GenerateCall(
    MacroAssembler* masm,
    Smi* left,
    Register right) {
  if (!ArgsInRegistersSupported()) {
    // Pass arguments on the stack.
    __ Push(left);
    __ push(right);
  } else {
    // The calling convention with registers is left in rdx and right in rax.
    Register left_arg = rdx;
    Register right_arg = rax;
    if (right.is(right_arg)) {
      __ Move(left_arg, left);
    } else if (right.is(left_arg) && IsOperationCommutative()) {
      __ Move(right_arg, left);
      SetArgsReversed();
    } else {
      // For non-commutative operations, right and left_arg might be
      // the same register. Therefore, the order of the moves is
      // important here in order to not overwrite right before moving
      // it to right_arg.
      __ movq(right_arg, right);
      __ Move(left_arg, left);
    }
    // Update flags to indicate that arguments are in registers.
    SetArgsInRegisters();
    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
  }

  // Call the stub.
  __ CallStub(this);
}


class FloatingPointHelper : public AllStatic {
 public:
  // Load the operands from rdx and rax into xmm0 and xmm1, as doubles.
  // If the operands are not both numbers, jump to not_numbers.
  // Leaves rdx and rax unchanged. SmiOperands assumes both are smis.
  // NumberOperands assumes both are smis or heap numbers.
  static void LoadSSE2SmiOperands(MacroAssembler* masm);
  static void LoadSSE2NumberOperands(MacroAssembler* masm);
  static void LoadSSE2UnknownOperands(MacroAssembler* masm,
                                      Label* not_numbers);

  // Takes the operands in rdx and rax and loads them as integers in rax
  // and rcx.
  static void LoadAsIntegers(MacroAssembler* masm,
                             Label* operand_conversion_failure,
                             Register heap_number_map);
  // As above, but we know the operands to be numbers. In that case,
  // conversion can't fail.
  static void LoadNumbersAsIntegers(MacroAssembler* masm);
};


void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) {
  // 1. Move arguments into rdx, rax except for DIV and MOD, which need the
  // dividend in rax and rdx free for the division. Use rax, rbx for those.
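  // (On x64, the idiv instruction takes its dividend in rdx:rax and leaves
  // the quotient in rax and the remainder in rdx, which is why both of those
  // registers must be kept free here.)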
  Comment load_comment(masm, "-- Load arguments");
  Register left = rdx;
  Register right = rax;
  if (op_ == Token::DIV || op_ == Token::MOD) {
    left = rax;
    right = rbx;
    if (HasArgsInRegisters()) {
      __ movq(rbx, rax);
      __ movq(rax, rdx);
    }
  }
  if (!HasArgsInRegisters()) {
    __ movq(right, Operand(rsp, 1 * kPointerSize));
    __ movq(left, Operand(rsp, 2 * kPointerSize));
  }

  Label not_smis;
  // 2. Smi check both operands.
  if (static_operands_type_.IsSmi()) {
    // Skip smi check if we know that both arguments are smis.
    if (FLAG_debug_code) {
      __ AbortIfNotSmi(left);
      __ AbortIfNotSmi(right);
    }
    if (op_ == Token::BIT_OR) {
      // Handle OR here, since we do extra smi-checking in the or code below.
      __ SmiOr(right, right, left);
      GenerateReturn(masm);
      return;
    }
  } else {
    if (op_ != Token::BIT_OR) {
      // Skip the check for OR as it is better combined with the
      // actual operation.
      Comment smi_check_comment(masm, "-- Smi check arguments");
      __ JumpIfNotBothSmi(left, right, &not_smis);
    }
  }

  // 3. Operands are both smis (except for OR), perform the operation leaving
  // the result in rax and check the result if necessary.
  Comment perform_smi(masm, "-- Perform smi operation");
  Label use_fp_on_smis;
  switch (op_) {
    case Token::ADD: {
      ASSERT(right.is(rax));
      __ SmiAdd(right, right, left, &use_fp_on_smis);  // ADD is commutative.
      break;
    }

    case Token::SUB: {
      __ SmiSub(left, left, right, &use_fp_on_smis);
      __ movq(rax, left);
      break;
    }

    case Token::MUL:
      ASSERT(right.is(rax));
      __ SmiMul(right, right, left, &use_fp_on_smis);  // MUL is commutative.
      break;

    case Token::DIV:
      ASSERT(left.is(rax));
      __ SmiDiv(left, left, right, &use_fp_on_smis);
      break;

    case Token::MOD:
      ASSERT(left.is(rax));
      __ SmiMod(left, left, right, slow);
      break;

    case Token::BIT_OR:
      ASSERT(right.is(rax));
      __ movq(rcx, right);  // Save the right operand.
      __ SmiOr(right, right, left);  // BIT_OR is commutative.
      __ testb(right, Immediate(kSmiTagMask));
      __ j(not_zero, &not_smis);
      break;

    case Token::BIT_AND:
      ASSERT(right.is(rax));
      __ SmiAnd(right, right, left);  // BIT_AND is commutative.
      break;

    case Token::BIT_XOR:
      ASSERT(right.is(rax));
      __ SmiXor(right, right, left);  // BIT_XOR is commutative.
      break;

    case Token::SHL:
    case Token::SHR:
    case Token::SAR:
      switch (op_) {
        case Token::SAR:
          __ SmiShiftArithmeticRight(left, left, right);
          break;
        case Token::SHR:
          __ SmiShiftLogicalRight(left, left, right, slow);
          break;
        case Token::SHL:
          __ SmiShiftLeft(left, left, right);
          break;
        default:
          UNREACHABLE();
      }
      __ movq(rax, left);
      break;

    default:
      UNREACHABLE();
      break;
  }

  // 4. Emit return of result in rax.
  GenerateReturn(masm);

  // 5. For some operations emit inline code to perform floating point
  // operations on known smis (e.g., if the result of the operation
  // overflowed the smi range).
  switch (op_) {
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV: {
      ASSERT(use_fp_on_smis.is_linked());
      __ bind(&use_fp_on_smis);
      if (op_ == Token::DIV) {
        __ movq(rdx, rax);
        __ movq(rax, rbx);
      }
      // left is rdx, right is rax.
      __ AllocateHeapNumber(rbx, rcx, slow);
      FloatingPointHelper::LoadSSE2SmiOperands(masm);
      switch (op_) {
        case Token::ADD: __ addsd(xmm0, xmm1); break;
        case Token::SUB: __ subsd(xmm0, xmm1); break;
        case Token::MUL: __ mulsd(xmm0, xmm1); break;
        case Token::DIV: __ divsd(xmm0, xmm1); break;
        default: UNREACHABLE();
      }
      __ movsd(FieldOperand(rbx, HeapNumber::kValueOffset), xmm0);
      __ movq(rax, rbx);
      GenerateReturn(masm);
    }
    default:
      break;
  }

  // 6. Non-smi operands, fall out to the non-smi code with the operands in
  // rdx and rax.
  Comment done_comment(masm, "-- Enter non-smi code");
  __ bind(&not_smis);

  switch (op_) {
    case Token::DIV:
    case Token::MOD:
      // Operands are in rax, rbx at this point.
      __ movq(rdx, rax);
      __ movq(rax, rbx);
      break;

    case Token::BIT_OR:
      // Right operand is saved in rcx and rax was destroyed by the smi
      // operation.
      __ movq(rax, rcx);
      break;

    default:
      break;
  }
}


void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
  Label call_runtime;

  if (ShouldGenerateSmiCode()) {
    GenerateSmiCode(masm, &call_runtime);
  } else if (op_ != Token::MOD) {
    if (!HasArgsInRegisters()) {
      GenerateLoadArguments(masm);
    }
  }
  // Floating point case.
  if (ShouldGenerateFPCode()) {
    switch (op_) {
      case Token::ADD:
      case Token::SUB:
      case Token::MUL:
      case Token::DIV: {
        if (runtime_operands_type_ == BinaryOpIC::DEFAULT &&
            HasSmiCodeInStub()) {
          // Execution reaches this point when the first non-smi argument
          // occurs (and only if smi code is generated). This is the right
          // moment to patch to HEAP_NUMBERS state. The transition is
          // attempted only for the four basic operations. The stub stays
          // in the DEFAULT state forever for all other operations (also if
          // smi code is skipped).
          GenerateTypeTransition(masm);
          break;
        }

        Label not_floats;
        // rax: y
        // rdx: x
        if (static_operands_type_.IsNumber()) {
          if (FLAG_debug_code) {
            // Assert at runtime that inputs are only numbers.
            __ AbortIfNotNumber(rdx);
            __ AbortIfNotNumber(rax);
          }
          FloatingPointHelper::LoadSSE2NumberOperands(masm);
        } else {
          FloatingPointHelper::LoadSSE2UnknownOperands(masm, &call_runtime);
        }

        switch (op_) {
          case Token::ADD: __ addsd(xmm0, xmm1); break;
          case Token::SUB: __ subsd(xmm0, xmm1); break;
          case Token::MUL: __ mulsd(xmm0, xmm1); break;
          case Token::DIV: __ divsd(xmm0, xmm1); break;
          default: UNREACHABLE();
        }
        // Allocate a heap number, if needed.
        Label skip_allocation;
        OverwriteMode mode = mode_;
        if (HasArgsReversed()) {
          if (mode == OVERWRITE_RIGHT) {
            mode = OVERWRITE_LEFT;
          } else if (mode == OVERWRITE_LEFT) {
            mode = OVERWRITE_RIGHT;
          }
        }
        switch (mode) {
          case OVERWRITE_LEFT:
            __ JumpIfNotSmi(rdx, &skip_allocation);
            __ AllocateHeapNumber(rbx, rcx, &call_runtime);
            __ movq(rdx, rbx);
            __ bind(&skip_allocation);
            __ movq(rax, rdx);
            break;
          case OVERWRITE_RIGHT:
            // If the argument in rax is already an object, we skip the
            // allocation of a heap number.
            __ JumpIfNotSmi(rax, &skip_allocation);
            // Fall through!
          case NO_OVERWRITE:
            // Allocate a heap number for the result. Keep rax and rdx intact
            // for the possible runtime call.
            __ AllocateHeapNumber(rbx, rcx, &call_runtime);
            __ movq(rax, rbx);
            __ bind(&skip_allocation);
            break;
          default: UNREACHABLE();
        }
        __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0);
        GenerateReturn(masm);
        __ bind(&not_floats);
        if (runtime_operands_type_ == BinaryOpIC::DEFAULT &&
            !HasSmiCodeInStub()) {
          // Execution reaches this point when the first non-number argument
          // occurs (and only if smi code is skipped from the stub, otherwise
          // the patching has already been done earlier in this case branch).
          // A perfect moment to try patching to STRINGS for ADD operation.
          if (op_ == Token::ADD) {
            GenerateTypeTransition(masm);
          }
        }
        break;
      }
      case Token::MOD: {
        // For MOD we go directly to runtime in the non-smi case.
        break;
      }
      case Token::BIT_OR:
      case Token::BIT_AND:
      case Token::BIT_XOR:
      case Token::SAR:
      case Token::SHL:
      case Token::SHR: {
        Label skip_allocation, non_smi_shr_result;
        Register heap_number_map = r9;
        __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
        if (static_operands_type_.IsNumber()) {
          if (FLAG_debug_code) {
            // Assert at runtime that inputs are only numbers.
            __ AbortIfNotNumber(rdx);
            __ AbortIfNotNumber(rax);
          }
          FloatingPointHelper::LoadNumbersAsIntegers(masm);
        } else {
          FloatingPointHelper::LoadAsIntegers(masm,
                                              &call_runtime,
                                              heap_number_map);
        }
        switch (op_) {
          case Token::BIT_OR: __ orl(rax, rcx); break;
          case Token::BIT_AND: __ andl(rax, rcx); break;
          case Token::BIT_XOR: __ xorl(rax, rcx); break;
          case Token::SAR: __ sarl_cl(rax); break;
          case Token::SHL: __ shll_cl(rax); break;
          case Token::SHR: {
            __ shrl_cl(rax);
            // Check if result is negative. This can only happen for a shift
            // by zero.
            __ testl(rax, rax);
            __ j(negative, &non_smi_shr_result);
            break;
          }
          default: UNREACHABLE();
        }

        STATIC_ASSERT(kSmiValueSize == 32);
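        // On x64, smis carry a full 32-bit payload (shifted into the upper
        // half of the tagged word), so any signed 32-bit result can be
        // tagged without overflow; the assert above guards this assumption.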
        // Tag smi result and return.
        __ Integer32ToSmi(rax, rax);
        GenerateReturn(masm);

        // All bit-ops except SHR return a signed int32 that can be
        // returned immediately as a smi.
        // We might need to allocate a HeapNumber if we shift a negative
        // number right by zero (i.e., convert to UInt32).
        if (op_ == Token::SHR) {
          ASSERT(non_smi_shr_result.is_linked());
          __ bind(&non_smi_shr_result);
          // Allocate a heap number if needed.
          __ movl(rbx, rax);  // rbx holds result value (uint32 value as int64).
          switch (mode_) {
            case OVERWRITE_LEFT:
            case OVERWRITE_RIGHT:
              // If the operand was an object, we skip the
              // allocation of a heap number.
              __ movq(rax, Operand(rsp, mode_ == OVERWRITE_RIGHT ?
                                   1 * kPointerSize : 2 * kPointerSize));
              __ JumpIfNotSmi(rax, &skip_allocation);
              // Fall through!
            case NO_OVERWRITE:
              // Allocate heap number in new space.
              // Not using AllocateHeapNumber macro in order to reuse
              // already loaded heap_number_map.
              __ AllocateInNewSpace(HeapNumber::kSize,
                                    rax,
                                    rcx,
                                    no_reg,
                                    &call_runtime,
                                    TAG_OBJECT);
              // Set the map.
              if (FLAG_debug_code) {
                __ AbortIfNotRootValue(heap_number_map,
                                       Heap::kHeapNumberMapRootIndex,
                                       "HeapNumberMap register clobbered.");
              }
              __ movq(FieldOperand(rax, HeapObject::kMapOffset),
                      heap_number_map);
              __ bind(&skip_allocation);
              break;
            default: UNREACHABLE();
          }
          // Store the result in the HeapNumber and return.
          __ cvtqsi2sd(xmm0, rbx);
          __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0);
          GenerateReturn(masm);
        }

        break;
      }
      default: UNREACHABLE(); break;
    }
  }

  // If all else fails, use the runtime system to get the correct
  // result. If arguments were passed in registers, now place them on the
  // stack in the correct order below the return address.
  __ bind(&call_runtime);

  if (HasArgsInRegisters()) {
    GenerateRegisterArgsPush(masm);
  }

  switch (op_) {
    case Token::ADD: {
      // Registers containing left and right operands respectively.
      Register lhs, rhs;

      if (HasArgsReversed()) {
        lhs = rax;
        rhs = rdx;
      } else {
        lhs = rdx;
        rhs = rax;
      }

      // Test for string arguments before calling runtime.
      Label not_strings, both_strings, not_string1, string1, string1_smi2;

      // If this stub has already generated FP-specific code then the
      // arguments are already in rdx and rax.
      if (!ShouldGenerateFPCode() && !HasArgsInRegisters()) {
        GenerateLoadArguments(masm);
      }

      Condition is_smi;
      is_smi = masm->CheckSmi(lhs);
      __ j(is_smi, &not_string1);
      __ CmpObjectType(lhs, FIRST_NONSTRING_TYPE, r8);
      __ j(above_equal, &not_string1);

      // First argument is a string, test second.
      is_smi = masm->CheckSmi(rhs);
      __ j(is_smi, &string1_smi2);
      __ CmpObjectType(rhs, FIRST_NONSTRING_TYPE, r9);
      __ j(above_equal, &string1);

      // First and second argument are strings.
      StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
      __ TailCallStub(&string_add_stub);

      __ bind(&string1_smi2);
      // First argument is a string, second is a smi. Try to look up the
      // number string for the smi in the number string cache.
      NumberToStringStub::GenerateLookupNumberStringCache(
          masm, rhs, rbx, rcx, r8, true, &string1);

      // Replace second argument on stack and tailcall string add stub to make
      // the result.
      __ movq(Operand(rsp, 1 * kPointerSize), rbx);
      __ TailCallStub(&string_add_stub);

      // Only first argument is a string.
      __ bind(&string1);
      __ InvokeBuiltin(Builtins::STRING_ADD_LEFT, JUMP_FUNCTION);

      // First argument was not a string, test second.
      __ bind(&not_string1);
      is_smi = masm->CheckSmi(rhs);
      __ j(is_smi, &not_strings);
      __ CmpObjectType(rhs, FIRST_NONSTRING_TYPE, rhs);
      __ j(above_equal, &not_strings);

      // Only second argument is a string.
      __ InvokeBuiltin(Builtins::STRING_ADD_RIGHT, JUMP_FUNCTION);

      __ bind(&not_strings);
      // Neither argument is a string.
      __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
      break;
    }
    case Token::SUB:
      __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
      break;
    case Token::MUL:
      __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
      break;
    case Token::DIV:
      __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
      break;
    case Token::MOD:
      __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
      break;
    case Token::BIT_OR:
      __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
      break;
    case Token::BIT_AND:
      __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
      break;
    case Token::BIT_XOR:
      __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
      break;
    case Token::SAR:
      __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
      break;
    case Token::SHL:
      __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
      break;
    case Token::SHR:
      __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}


void GenericBinaryOpStub::GenerateLoadArguments(MacroAssembler* masm) {
  ASSERT(!HasArgsInRegisters());
  __ movq(rax, Operand(rsp, 1 * kPointerSize));
  __ movq(rdx, Operand(rsp, 2 * kPointerSize));
}


void GenericBinaryOpStub::GenerateReturn(MacroAssembler* masm) {
  // If arguments are not passed in registers, remove them from the stack
  // before returning.
  if (!HasArgsInRegisters()) {
    __ ret(2 * kPointerSize);  // Remove both operands.
  } else {
    __ ret(0);
  }
}


void GenericBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
  ASSERT(HasArgsInRegisters());
  __ pop(rcx);
  if (HasArgsReversed()) {
    __ push(rax);
    __ push(rdx);
  } else {
    __ push(rdx);
    __ push(rax);
  }
  __ push(rcx);
}


void GenericBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  Label get_result;

  // Ensure the operands are on the stack.
  if (HasArgsInRegisters()) {
    GenerateRegisterArgsPush(masm);
  }

  // Left and right arguments are already on stack.
  __ pop(rcx);  // Save the return address.

  // Push this stub's key.
  __ Push(Smi::FromInt(MinorKey()));

  // Although the operation and the type info are encoded into the key,
  // the encoding is opaque, so push them too.
  __ Push(Smi::FromInt(op_));

  __ Push(Smi::FromInt(runtime_operands_type_));

  __ push(rcx);  // The return address.

  // Perform patching to an appropriate fast case and return the result.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kBinaryOp_Patch)),
      5,
      1);
}


Handle<Code> GetBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info) {
  GenericBinaryOpStub stub(key, type_info);
  return stub.GetCode();
}


void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
  // Input on stack:
  // rsp[8]: argument (should be number).
  // rsp[0]: return address.
  Label runtime_call;
  Label runtime_call_clear_stack;
  Label input_not_smi;
  NearLabel loaded;
  // Test that rax is a number.
  __ movq(rax, Operand(rsp, kPointerSize));
  __ JumpIfNotSmi(rax, &input_not_smi);
  // Input is a smi. Untag and load it onto the FPU stack.
  // Then load the bits of the double into rbx.
  __ SmiToInteger32(rax, rax);
  __ subq(rsp, Immediate(kPointerSize));
  __ cvtlsi2sd(xmm1, rax);
  __ movsd(Operand(rsp, 0), xmm1);
  __ movq(rbx, xmm1);
  __ movq(rdx, xmm1);
  __ fld_d(Operand(rsp, 0));
  __ addq(rsp, Immediate(kPointerSize));
  __ jmp(&loaded);

  __ bind(&input_not_smi);
  // Check if input is a HeapNumber.
  __ Move(rbx, Factory::heap_number_map());
  __ cmpq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
  __ j(not_equal, &runtime_call);
  // Input is a HeapNumber. Push it on the FPU stack and load its
  // bits into rbx.
  __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset));
  __ movq(rbx, FieldOperand(rax, HeapNumber::kValueOffset));
  __ movq(rdx, rbx);
  __ bind(&loaded);
  // ST[0] == double value
  // rbx = bits of double value.
  // rdx = also bits of double value.
  // Compute hash (h is 32 bits, bits are 64 and the shifts are arithmetic):
  //   h = h0 = bits ^ (bits >> 32);
  //   h ^= h >> 16;
  //   h ^= h >> 8;
  //   h = h & (cacheSize - 1);
  // or h = (h0 ^ (h0 >> 8) ^ (h0 >> 16) ^ (h0 >> 24)) & (cacheSize - 1)
  __ sar(rdx, Immediate(32));
  __ xorl(rdx, rbx);
  __ movl(rcx, rdx);
  __ movl(rax, rdx);
  __ movl(rdi, rdx);
  __ sarl(rdx, Immediate(8));
  __ sarl(rcx, Immediate(16));
  __ sarl(rax, Immediate(24));
  __ xorl(rcx, rdx);
  __ xorl(rax, rdi);
  __ xorl(rcx, rax);
  ASSERT(IsPowerOf2(TranscendentalCache::kCacheSize));
  __ andl(rcx, Immediate(TranscendentalCache::kCacheSize - 1));
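  // Masking with kCacheSize - 1 keeps the hash in range; this relies on
  // kCacheSize being a power of two, which the ASSERT above checks.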

  // ST[0] == double value.
  // rbx = bits of double value.
  // rcx = TranscendentalCache::hash(double value).
  __ movq(rax, ExternalReference::transcendental_cache_array_address());
  // rax points to cache array.
  __ movq(rax, Operand(rax, type_ * sizeof(TranscendentalCache::caches_[0])));
  // rax points to the cache for the type type_.
  // If NULL, the cache hasn't been initialized yet, so go through runtime.
  __ testq(rax, rax);
  __ j(zero, &runtime_call_clear_stack);
#ifdef DEBUG
  // Check that the layout of cache elements matches expectations.
  {  // NOLINT - doesn't like a single brace on a line.
    TranscendentalCache::Element test_elem[2];
    char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
    char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
    char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
    char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
    char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
    // Two uint_32's and a pointer per element.
    CHECK_EQ(16, static_cast<int>(elem2_start - elem_start));
    CHECK_EQ(0, static_cast<int>(elem_in0 - elem_start));
    CHECK_EQ(kIntSize, static_cast<int>(elem_in1 - elem_start));
    CHECK_EQ(2 * kIntSize, static_cast<int>(elem_out - elem_start));
  }
#endif
  // Find the address of the rcx'th entry in the cache, i.e., &rax[rcx*16].
  __ addl(rcx, rcx);
  __ lea(rcx, Operand(rax, rcx, times_8, 0));
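  // (x64 addressing modes scale an index by at most 8, so the required *16
  // is computed as a doubling followed by a times_8 scaled index.)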
  // Check if cache matches: Double value is stored in uint32_t[2] array.
  NearLabel cache_miss;
  __ cmpq(rbx, Operand(rcx, 0));
  __ j(not_equal, &cache_miss);
  // Cache hit!
  __ movq(rax, Operand(rcx, 2 * kIntSize));
  __ fstp(0);  // Clear FPU stack.
  __ ret(kPointerSize);

  __ bind(&cache_miss);
  // Update cache with new value.
  Label nan_result;
  GenerateOperation(masm, &nan_result);
  __ AllocateHeapNumber(rax, rdi, &runtime_call_clear_stack);
  __ movq(Operand(rcx, 0), rbx);
  __ movq(Operand(rcx, 2 * kIntSize), rax);
  __ fstp_d(FieldOperand(rax, HeapNumber::kValueOffset));
  __ ret(kPointerSize);

  __ bind(&runtime_call_clear_stack);
  __ fstp(0);
  __ bind(&runtime_call);
  __ TailCallExternalReference(ExternalReference(RuntimeFunction()), 1, 1);

  __ bind(&nan_result);
  __ fstp(0);  // Remove argument from FPU stack.
  __ LoadRoot(rax, Heap::kNanValueRootIndex);
  __ movq(Operand(rcx, 0), rbx);
  __ movq(Operand(rcx, 2 * kIntSize), rax);
  __ ret(kPointerSize);
}


Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
  switch (type_) {
    // Add more cases when necessary.
    case TranscendentalCache::SIN: return Runtime::kMath_sin;
    case TranscendentalCache::COS: return Runtime::kMath_cos;
    default:
      UNIMPLEMENTED();
      return Runtime::kAbort;
  }
}


void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm,
                                                Label* on_nan_result) {
  // Registers:
  // rbx: Bits of input double. Must be preserved.
  // rcx: Pointer to cache entry. Must be preserved.
  // st(0): Input double.
  Label done;
  ASSERT(type_ == TranscendentalCache::SIN ||
         type_ == TranscendentalCache::COS);
  // More transcendental types can be added later.

  // Both fsin and fcos require arguments in the range +/-2^63 and
  // return NaN for infinities and NaN. They can share all code except
  // the actual fsin/fcos operation.
  Label in_range;
  // If the argument is outside the range -2^63..2^63, fsin/fcos don't
  // work. We must reduce it to the appropriate range.
  __ movq(rdi, rbx);
  // Move exponent and sign bits to low bits.
  __ shr(rdi, Immediate(HeapNumber::kMantissaBits));
  // Remove sign bit.
  __ andl(rdi, Immediate((1 << HeapNumber::kExponentBits) - 1));
  int supported_exponent_limit = (63 + HeapNumber::kExponentBias);
  __ cmpl(rdi, Immediate(supported_exponent_limit));
  __ j(below, &in_range);
  // Check for infinity and NaN. Both return NaN for sin.
  __ cmpl(rdi, Immediate(0x7ff));
  __ j(equal, on_nan_result);

  // Use fprem1 to restrict the argument to the range +/-2*PI.
  __ fldpi();
  __ fadd(0);
  __ fld(1);
  // FPU Stack: input, 2*pi, input.
  {
    Label no_exceptions;
    __ fwait();
    __ fnstsw_ax();
    // Clear if Illegal Operand or Zero Division exceptions are set.
    __ testl(rax, Immediate(5));  // #IO and #ZD flags of FPU status word.
    __ j(zero, &no_exceptions);
    __ fnclex();
    __ bind(&no_exceptions);
  }

  // Compute st(0) % st(1).
  {
    NearLabel partial_remainder_loop;
    __ bind(&partial_remainder_loop);
    __ fprem1();
    __ fwait();
    __ fnstsw_ax();
    __ testl(rax, Immediate(0x400));  // Check C2 bit of FPU status word.
    // If C2 is set, computation only has partial result. Loop to
    // continue computation.
    __ j(not_zero, &partial_remainder_loop);
  }
  // FPU Stack: input, 2*pi, input % 2*pi
  __ fstp(2);
  // FPU Stack: input % 2*pi, 2*pi,
  __ fstp(0);
  // FPU Stack: input % 2*pi
  __ bind(&in_range);
  switch (type_) {
    case TranscendentalCache::SIN:
      __ fsin();
      break;
    case TranscendentalCache::COS:
      __ fcos();
      break;
    default:
      UNREACHABLE();
  }
  __ bind(&done);
}


// Get the integer part of a heap number.
// Overwrites the contents of rdi, rbx and rcx. Result cannot be rdi or rbx.
void IntegerConvert(MacroAssembler* masm,
                    Register result,
                    Register source) {
  // Result may be rcx. If result and source are the same register, source will
  // be overwritten.
  ASSERT(!result.is(rdi) && !result.is(rbx));
  // TODO(lrn): When type info reaches here, if value is a 32-bit integer, use
  // cvttsd2si (32-bit version) directly.
  Register double_exponent = rbx;
  Register double_value = rdi;
  NearLabel done, exponent_63_plus;
  // Get double and extract exponent.
  __ movq(double_value, FieldOperand(source, HeapNumber::kValueOffset));
  // Clear result preemptively, in case we need to return zero.
  __ xorl(result, result);
  __ movq(xmm0, double_value);  // Save copy in xmm0 in case we need it there.
  // Double the value to remove the sign bit, shift the exponent down to the
  // least significant bits, and subtract the bias to get the unshifted,
  // unbiased exponent.
  __ lea(double_exponent, Operand(double_value, double_value, times_1, 0));
  __ shr(double_exponent, Immediate(64 - HeapNumber::kExponentBits));
  __ subl(double_exponent, Immediate(HeapNumber::kExponentBias));
  // Check whether the exponent is too big for a 63 bit unsigned integer.
  __ cmpl(double_exponent, Immediate(63));
  __ j(above_equal, &exponent_63_plus);
  // Handle exponent range 0..62.
  __ cvttsd2siq(result, xmm0);
  __ jmp(&done);

  __ bind(&exponent_63_plus);
  // Exponent negative or 63+.
  __ cmpl(double_exponent, Immediate(83));
  // If exponent negative or above 83, number contains no significant bits in
  // the range 0..2^31, so result is zero, and rcx already holds zero.
  __ j(above, &done);

  // Exponent in range 63..83.
  // Mantissa * 2^exponent contains bits in the range 2^0..2^31, namely
  // the least significant exponent-52 bits.

  // Negate low bits of mantissa if value is negative.
  __ addq(double_value, double_value);  // Move sign bit to carry.
  __ sbbl(result, result);  // And convert carry to -1 in result register.
  // If the value was negative, this computes (mantissa - 1) ^ -1, i.e., the
  // two's-complement negation; otherwise it computes (mantissa - 0) ^ 0,
  // leaving the value unchanged.
  __ addl(double_value, result);
  // Do xor in opposite directions depending on where we want the result
  // (depending on whether result is rcx or not).

  if (result.is(rcx)) {
    __ xorl(double_value, result);
    // Left shift mantissa by (exponent - mantissabits - 1) to save the
    // bits that have positional values below 2^32 (the extra -1 comes from the
    // doubling done above to move the sign bit into the carry flag).
    __ leal(rcx, Operand(double_exponent, -HeapNumber::kMantissaBits - 1));
    __ shll_cl(double_value);
    __ movl(result, double_value);
  } else {
    // As the then-branch, but move double-value to result before shifting.
    __ xorl(result, double_value);
    __ leal(rcx, Operand(double_exponent, -HeapNumber::kMantissaBits - 1));
    __ shll_cl(result);
  }

  __ bind(&done);
}


// Input: rdx, rax are the left and right objects of a bit op.
// Output: rax, rcx are left and right integers for a bit op.
void FloatingPointHelper::LoadNumbersAsIntegers(MacroAssembler* masm) {
  // Check float operands.
  Label done;
  Label rax_is_smi;
  Label rax_is_object;
  Label rdx_is_object;

  __ JumpIfNotSmi(rdx, &rdx_is_object);
  __ SmiToInteger32(rdx, rdx);
  __ JumpIfSmi(rax, &rax_is_smi);

  __ bind(&rax_is_object);
  IntegerConvert(masm, rcx, rax);  // Uses rdi, rcx and rbx.
  __ jmp(&done);

  __ bind(&rdx_is_object);
  IntegerConvert(masm, rdx, rdx);  // Uses rdi, rcx and rbx.
  __ JumpIfNotSmi(rax, &rax_is_object);
  __ bind(&rax_is_smi);
  __ SmiToInteger32(rcx, rax);

  __ bind(&done);
  __ movl(rax, rdx);
}


// Input: rdx, rax are the left and right objects of a bit op.
// Output: rax, rcx are left and right integers for a bit op.
void FloatingPointHelper::LoadAsIntegers(MacroAssembler* masm,
                                         Label* conversion_failure,
                                         Register heap_number_map) {
  // Check float operands.
  Label arg1_is_object, check_undefined_arg1;
  Label arg2_is_object, check_undefined_arg2;
  Label load_arg2, done;

  __ JumpIfNotSmi(rdx, &arg1_is_object);
  __ SmiToInteger32(rdx, rdx);
  __ jmp(&load_arg2);

  // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
  __ bind(&check_undefined_arg1);
  __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, conversion_failure);
  __ movl(rdx, Immediate(0));
  __ jmp(&load_arg2);

  __ bind(&arg1_is_object);
  __ cmpq(FieldOperand(rdx, HeapObject::kMapOffset), heap_number_map);
  __ j(not_equal, &check_undefined_arg1);
  // Get the untagged integer version of the rdx heap number in rdx.
  IntegerConvert(masm, rdx, rdx);

  // Here rdx has the untagged integer, rax has a Smi or a heap number.
  __ bind(&load_arg2);
  // Test if arg2 is a Smi.
  __ JumpIfNotSmi(rax, &arg2_is_object);
  __ SmiToInteger32(rax, rax);
  __ movl(rcx, rax);
  __ jmp(&done);

  // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
  __ bind(&check_undefined_arg2);
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, conversion_failure);
  __ movl(rcx, Immediate(0));
  __ jmp(&done);

  __ bind(&arg2_is_object);
  __ cmpq(FieldOperand(rax, HeapObject::kMapOffset), heap_number_map);
  __ j(not_equal, &check_undefined_arg2);
  // Get the untagged integer version of the rax heap number in rcx.
  IntegerConvert(masm, rcx, rax);
  __ bind(&done);
  __ movl(rax, rdx);
}


void FloatingPointHelper::LoadSSE2SmiOperands(MacroAssembler* masm) {
  __ SmiToInteger32(kScratchRegister, rdx);
  __ cvtlsi2sd(xmm0, kScratchRegister);
  __ SmiToInteger32(kScratchRegister, rax);
  __ cvtlsi2sd(xmm1, kScratchRegister);
}


void FloatingPointHelper::LoadSSE2NumberOperands(MacroAssembler* masm) {
  Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, done;
  // Load operand in rdx into xmm0.
  __ JumpIfSmi(rdx, &load_smi_rdx);
  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  // Load operand in rax into xmm1.
  __ JumpIfSmi(rax, &load_smi_rax);
  __ bind(&load_nonsmi_rax);
  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi_rdx);
  __ SmiToInteger32(kScratchRegister, rdx);
  __ cvtlsi2sd(xmm0, kScratchRegister);
  __ JumpIfNotSmi(rax, &load_nonsmi_rax);

  __ bind(&load_smi_rax);
  __ SmiToInteger32(kScratchRegister, rax);
  __ cvtlsi2sd(xmm1, kScratchRegister);

  __ bind(&done);
}


void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm,
                                                  Label* not_numbers) {
  Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done;
  // Load operand in rdx into xmm0, or branch to not_numbers.
  __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex);
  __ JumpIfSmi(rdx, &load_smi_rdx);
  __ cmpq(FieldOperand(rdx, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);  // Argument in rdx is not a number.
  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  // Load operand in rax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(rax, &load_smi_rax);

  __ bind(&load_nonsmi_rax);
  __ cmpq(FieldOperand(rax, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);
  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi_rdx);
  __ SmiToInteger32(kScratchRegister, rdx);
  __ cvtlsi2sd(xmm0, kScratchRegister);
  __ JumpIfNotSmi(rax, &load_nonsmi_rax);

  __ bind(&load_smi_rax);
  __ SmiToInteger32(kScratchRegister, rax);
  __ cvtlsi2sd(xmm1, kScratchRegister);
  __ bind(&done);
}


void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
  Label slow, done;

  if (op_ == Token::SUB) {
    if (include_smi_code_) {
      // Check whether the value is a smi.
      Label try_float;
      __ JumpIfNotSmi(rax, &try_float);
      if (negative_zero_ == kIgnoreNegativeZero) {
        __ SmiCompare(rax, Smi::FromInt(0));
        __ j(equal, &done);
      }
      __ SmiNeg(rax, rax, &done);
      __ jmp(&slow);  // Zero, if not handled above, and Smi::kMinValue.

      // Try floating point case.
      __ bind(&try_float);
    } else if (FLAG_debug_code) {
      __ AbortIfSmi(rax);
    }

    __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
    __ CompareRoot(rdx, Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &slow);
    // Operand is a float, negate its value by flipping sign bit.
    __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset));
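    // Build the sign-bit mask (1 << 63) in two steps: Immediate operands are
    // only 32 bits wide, so the 64-bit constant cannot be loaded directly.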
    __ movq(kScratchRegister, Immediate(0x01));
    __ shl(kScratchRegister, Immediate(63));
    __ xor_(rdx, kScratchRegister);  // Flip sign.
    // rdx is value to store.
    if (overwrite_ == UNARY_OVERWRITE) {
      __ movq(FieldOperand(rax, HeapNumber::kValueOffset), rdx);
    } else {
      __ AllocateHeapNumber(rcx, rbx, &slow);
      // rcx: allocated 'empty' number.
      __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx);
      __ movq(rax, rcx);
    }
  } else if (op_ == Token::BIT_NOT) {
    if (include_smi_code_) {
      Label try_float;
      __ JumpIfNotSmi(rax, &try_float);
      __ SmiNot(rax, rax);
      __ jmp(&done);
      // Try floating point case.
      __ bind(&try_float);
    } else if (FLAG_debug_code) {
      __ AbortIfSmi(rax);
    }

    // Check if the operand is a heap number.
    __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
    __ CompareRoot(rdx, Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &slow);

    // Convert the heap number in rax to an untagged integer in rax.
    IntegerConvert(masm, rax, rax);

    // Do the bitwise operation and smi tag the result.
    __ notl(rax);
    __ Integer32ToSmi(rax, rax);
  }

  // Return from the stub.
  __ bind(&done);
  __ StubReturn(1);

  // Handle the slow case by jumping to the JavaScript builtin.
  __ bind(&slow);
  __ pop(rcx);  // Pop return address.
  __ push(rax);
  __ push(rcx);  // Push return address.
  switch (op_) {
    case Token::SUB:
      __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
      break;
    case Token::BIT_NOT:
      __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}


void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  // The key is in rdx and the parameter count is in rax.

  // The displacement is used for skipping the frame pointer on the
  // stack. It is the offset of the last parameter (if any) relative
  // to the frame pointer.
  static const int kDisplacement = 1 * kPointerSize;

  // Check that the key is a smi.
  Label slow;
  __ JumpIfNotSmi(rdx, &slow);

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor;
  __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ SmiCompare(Operand(rbx, StandardFrameConstants::kContextOffset),
                Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor);

  // Check index against formal parameters count limit passed in
  // through register rax. Use unsigned comparison to get negative
  // check for free.
  __ cmpq(rdx, rax);
  __ j(above_equal, &slow);

  // Read the argument from the stack and return it.
  SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
  __ lea(rbx, Operand(rbp, index.reg, index.scale, 0));
  index = masm->SmiToNegativeIndex(rdx, rdx, kPointerSizeLog2);
  __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement));
  __ Ret();

  // Arguments adaptor case: Check index against actual arguments
  // limit found in the arguments adaptor frame. Use unsigned
  // comparison to get negative check for free.
  __ bind(&adaptor);
  __ movq(rcx, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ cmpq(rdx, rcx);
  __ j(above_equal, &slow);

  // Read the argument from the stack and return it.
  index = masm->SmiToIndex(rax, rcx, kPointerSizeLog2);
  __ lea(rbx, Operand(rbx, index.reg, index.scale, 0));
  index = masm->SmiToNegativeIndex(rdx, rdx, kPointerSizeLog2);
  __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement));
  __ Ret();

  // Slow-case: Handle non-smi or out-of-bounds access to arguments
  // by calling the runtime system.
  __ bind(&slow);
  __ pop(rbx);  // Return address.
  __ push(rdx);
  __ push(rbx);
  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
}


void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
  // rsp[0] : return address
  // rsp[8] : number of parameters
  // rsp[16] : receiver displacement
  // rsp[24] : function

  // The displacement is used for skipping the return address and the
  // frame pointer on the stack. It is the offset of the last
  // parameter (if any) relative to the frame pointer.
  static const int kDisplacement = 2 * kPointerSize;

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset),
                Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor_frame);

  // Get the length from the frame.
  __ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize));
  __ jmp(&try_allocate);

  // Patch the arguments.length and the parameters pointer.
  __ bind(&adaptor_frame);
  __ SmiToInteger32(rcx,
                    Operand(rdx,
                            ArgumentsAdaptorFrameConstants::kLengthOffset));
  // Space on stack must already hold a smi.
  __ Integer32ToSmiField(Operand(rsp, 1 * kPointerSize), rcx);
  // Do not clobber the length index for the indexing operation since
  // it is used to compute the size for allocation later.
  __ lea(rdx, Operand(rdx, rcx, times_pointer_size, kDisplacement));
  __ movq(Operand(rsp, 2 * kPointerSize), rdx);

  // Try the new space allocation. Start out with computing the size of
  // the arguments object and the elements array.
  Label add_arguments_object;
  __ bind(&try_allocate);
  __ testl(rcx, rcx);
  __ j(zero, &add_arguments_object);
  __ leal(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize));
  __ bind(&add_arguments_object);
  __ addl(rcx, Immediate(Heap::kArgumentsObjectSize));
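  // rcx now holds the total allocation size in bytes: the arguments object
  // itself, plus (when there are arguments) a FixedArray header and one
  // pointer per argument.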
1589
1590 // Do the allocation of both objects in one go.
1591 __ AllocateInNewSpace(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT);
1592
1593 // Get the arguments boilerplate from the current (global) context.
1594 int offset = Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX);
1595 __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
1596 __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset));
1597 __ movq(rdi, Operand(rdi, offset));
1598
1599 // Copy the JS object part.
1600 STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
1601 __ movq(kScratchRegister, FieldOperand(rdi, 0 * kPointerSize));
1602 __ movq(rdx, FieldOperand(rdi, 1 * kPointerSize));
1603 __ movq(rbx, FieldOperand(rdi, 2 * kPointerSize));
1604 __ movq(FieldOperand(rax, 0 * kPointerSize), kScratchRegister);
1605 __ movq(FieldOperand(rax, 1 * kPointerSize), rdx);
1606 __ movq(FieldOperand(rax, 2 * kPointerSize), rbx);
1607
1608 // Setup the callee in-object property.
1609 ASSERT(Heap::arguments_callee_index == 0);
1610 __ movq(kScratchRegister, Operand(rsp, 3 * kPointerSize));
1611 __ movq(FieldOperand(rax, JSObject::kHeaderSize), kScratchRegister);
1612
1613 // Get the length (smi tagged) and set that as an in-object property too.
1614 ASSERT(Heap::arguments_length_index == 1);
1615 __ movq(rcx, Operand(rsp, 1 * kPointerSize));
1616 __ movq(FieldOperand(rax, JSObject::kHeaderSize + kPointerSize), rcx);
1617
1618 // If there are no actual arguments, we're done.
1619 Label done;
1620 __ SmiTest(rcx);
1621 __ j(zero, &done);
1622
1623 // Get the parameters pointer from the stack (the length is untagged
1624 __ movq(rdx, Operand(rsp, 2 * kPointerSize));
1625
1626 // Setup the elements pointer in the allocated arguments object and
1627 // initialize the header in the elements fixed array.
1628 __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSize));
1629 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi);
1630 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
1631 __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
1632 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
1633 __ SmiToInteger32(rcx, rcx); // Untag length for the loop below.
1634
1635 // Copy the fixed array slots.
1636 Label loop;
1637 __ bind(&loop);
1638 __ movq(kScratchRegister, Operand(rdx, -1 * kPointerSize)); // Skip receiver.
1639 __ movq(FieldOperand(rdi, FixedArray::kHeaderSize), kScratchRegister);
1640 __ addq(rdi, Immediate(kPointerSize));
1641 __ subq(rdx, Immediate(kPointerSize));
1642 __ decl(rcx);
1643 __ j(not_zero, &loop);
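// The loop above copies length stack slots into the elements array: rdx
// walks down through the parameters (the -1 * kPointerSize skips over
// the receiver slot) while rdi advances through the FixedArray, with
// rcx counting the remaining slots down to zero.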
1644
1645 // Return and remove the on-stack parameters.
1646 __ bind(&done);
1647 __ ret(3 * kPointerSize);
1648
1649 // Do the runtime call to allocate the arguments object.
1650 __ bind(&runtime);
1651 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
1652}
1653
1654
1655void RegExpExecStub::Generate(MacroAssembler* masm) {
1656 // Jump straight to the runtime system if native RegExp is not selected
1657 // at compile time, or if entering generated regexp code is disabled by
1658 // the runtime flag.
1659#ifdef V8_INTERPRETED_REGEXP
1660 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
1661#else // V8_INTERPRETED_REGEXP
1662 if (!FLAG_regexp_entry_native) {
1663 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
1664 return;
1665 }
1666
1667 // Stack frame on entry.
1668 // rsp[0]: return address
1669 // rsp[8]: last_match_info (expected JSArray)
1670 // rsp[16]: previous index
1671 // rsp[24]: subject string
1672 // rsp[32]: JSRegExp object
1673
1674 static const int kLastMatchInfoOffset = 1 * kPointerSize;
1675 static const int kPreviousIndexOffset = 2 * kPointerSize;
1676 static const int kSubjectOffset = 3 * kPointerSize;
1677 static const int kJSRegExpOffset = 4 * kPointerSize;
1678
1679 Label runtime;
1680
1681 // Ensure that a RegExp stack is allocated.
1682 ExternalReference address_of_regexp_stack_memory_address =
1683 ExternalReference::address_of_regexp_stack_memory_address();
1684 ExternalReference address_of_regexp_stack_memory_size =
1685 ExternalReference::address_of_regexp_stack_memory_size();
1686 __ movq(kScratchRegister, address_of_regexp_stack_memory_size);
1687 __ movq(kScratchRegister, Operand(kScratchRegister, 0));
1688 __ testq(kScratchRegister, kScratchRegister);
1689 __ j(zero, &runtime);
1690
1691
1692 // Check that the first argument is a JSRegExp object.
1693 __ movq(rax, Operand(rsp, kJSRegExpOffset));
1694 __ JumpIfSmi(rax, &runtime);
1695 __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister);
1696 __ j(not_equal, &runtime);
1697 // Check that the RegExp has been compiled (data contains a fixed array).
1698 __ movq(rcx, FieldOperand(rax, JSRegExp::kDataOffset));
1699 if (FLAG_debug_code) {
1700 Condition is_smi = masm->CheckSmi(rcx);
1701 __ Check(NegateCondition(is_smi),
1702 "Unexpected type for RegExp data, FixedArray expected");
1703 __ CmpObjectType(rcx, FIXED_ARRAY_TYPE, kScratchRegister);
1704 __ Check(equal, "Unexpected type for RegExp data, FixedArray expected");
1705 }
1706
1707 // rcx: RegExp data (FixedArray)
1708 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
1709 __ SmiToInteger32(rbx, FieldOperand(rcx, JSRegExp::kDataTagOffset));
1710 __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP));
1711 __ j(not_equal, &runtime);
1712
1713 // rcx: RegExp data (FixedArray)
1714 // Check that the number of captures fits in the static offsets vector buffer.
1715 __ SmiToInteger32(rdx,
1716 FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
1717 // Calculate number of capture registers (number_of_captures + 1) * 2.
1718 __ leal(rdx, Operand(rdx, rdx, times_1, 2));
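// The leal above computes rdx = rdx * 2 + 2 in a single instruction:
// e.g. a regexp with 3 capture groups needs (3 + 1) * 2 = 8 registers,
// a start and an end offset for the whole match and for each group.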
1719 // Check that the static offsets vector buffer is large enough.
1720 __ cmpl(rdx, Immediate(OffsetsVector::kStaticOffsetsVectorSize));
1721 __ j(above, &runtime);
1722
1723 // rcx: RegExp data (FixedArray)
1724 // rdx: Number of capture registers
1725 // Check that the second argument is a string.
1726 __ movq(rax, Operand(rsp, kSubjectOffset));
1727 __ JumpIfSmi(rax, &runtime);
1728 Condition is_string = masm->IsObjectStringType(rax, rbx, rbx);
1729 __ j(NegateCondition(is_string), &runtime);
1730
1731 // rax: Subject string.
1732 // rcx: RegExp data (FixedArray).
1733 // rdx: Number of capture registers.
1734 // Check that the third argument is a positive smi less than the string
1735 // length. A negative value will be greater (unsigned comparison).
1736 __ movq(rbx, Operand(rsp, kPreviousIndexOffset));
1737 __ JumpIfNotSmi(rbx, &runtime);
1738 __ SmiCompare(rbx, FieldOperand(rax, String::kLengthOffset));
1739 __ j(above_equal, &runtime);
1740
1741 // rcx: RegExp data (FixedArray)
1742 // rdx: Number of capture registers
1743 // Check that the fourth object is a JSArray object.
1744 __ movq(rax, Operand(rsp, kLastMatchInfoOffset));
1745 __ JumpIfSmi(rax, &runtime);
1746 __ CmpObjectType(rax, JS_ARRAY_TYPE, kScratchRegister);
1747 __ j(not_equal, &runtime);
1748 // Check that the JSArray is in fast case.
1749 __ movq(rbx, FieldOperand(rax, JSArray::kElementsOffset));
1750 __ movq(rax, FieldOperand(rbx, HeapObject::kMapOffset));
1751 __ Cmp(rax, Factory::fixed_array_map());
1752 __ j(not_equal, &runtime);
1753 // Check that the last match info has space for the capture registers and the
1754 // additional information. Ensure no overflow in add.
1755 STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset);
1756 __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset));
1757 __ addl(rdx, Immediate(RegExpImpl::kLastMatchOverhead));
1758 __ cmpl(rdx, rax);
1759 __ j(greater, &runtime);
1760
1761 // rcx: RegExp data (FixedArray)
1762 // Check the representation and encoding of the subject string.
1763 NearLabel seq_ascii_string, seq_two_byte_string, check_code;
1764 __ movq(rax, Operand(rsp, kSubjectOffset));
1765 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
1766 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
1767 // First check for flat two byte string.
1768 __ andb(rbx, Immediate(
1769 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask));
1770 STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
1771 __ j(zero, &seq_two_byte_string);
1772 // Any other flat string must be a flat ascii string.
1773 __ testb(rbx, Immediate(kIsNotStringMask | kStringRepresentationMask));
1774 __ j(zero, &seq_ascii_string);
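// After the andb above rbx holds only the string, representation and
// encoding bits. Zero means a sequential two-byte string (all three
// tags are zero); if just the string and representation bits are clear,
// the remaining set bit is the encoding bit, i.e. sequential ASCII.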
1775
1776 // Check for flat cons string.
1777 // A flat cons string is a cons string where the second part is the empty
1778 // string. In that case the subject string is just the first part of the cons
1779 // string. Also in this case the first part of the cons string is known to be
1780 // a sequential string or an external string.
1781 STATIC_ASSERT(kExternalStringTag != 0);
1782 STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0);
1783 __ testb(rbx, Immediate(kIsNotStringMask | kExternalStringTag));
1784 __ j(not_zero, &runtime);
1785 // String is a cons string.
1786 __ movq(rdx, FieldOperand(rax, ConsString::kSecondOffset));
1787 __ Cmp(rdx, Factory::empty_string());
1788 __ j(not_equal, &runtime);
1789 __ movq(rax, FieldOperand(rax, ConsString::kFirstOffset));
1790 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
1791 // String is a cons string with empty second part.
1792 // rax: first part of cons string.
1793 // rbx: map of first part of cons string.
1794 // Is first part a flat two byte string?
1795 __ testb(FieldOperand(rbx, Map::kInstanceTypeOffset),
1796 Immediate(kStringRepresentationMask | kStringEncodingMask));
1797 STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
1798 __ j(zero, &seq_two_byte_string);
1799 // Any other flat string must be ascii.
1800 __ testb(FieldOperand(rbx, Map::kInstanceTypeOffset),
1801 Immediate(kStringRepresentationMask));
1802 __ j(not_zero, &runtime);
1803
1804 __ bind(&seq_ascii_string);
1805 // rax: subject string (sequential ascii)
1806 // rcx: RegExp data (FixedArray)
1807 __ movq(r11, FieldOperand(rcx, JSRegExp::kDataAsciiCodeOffset));
1808 __ Set(rdi, 1); // Type is ascii.
1809 __ jmp(&check_code);
1810
1811 __ bind(&seq_two_byte_string);
1812 // rax: subject string (flat two-byte)
1813 // rcx: RegExp data (FixedArray)
1814 __ movq(r11, FieldOperand(rcx, JSRegExp::kDataUC16CodeOffset));
1815 __ Set(rdi, 0); // Type is two byte.
1816
1817 __ bind(&check_code);
1818 // Check that the irregexp code has been generated for the actual string
1819 // encoding. If it has, the field contains a code object; otherwise it
1820 // contains the hole.
1821 __ CmpObjectType(r11, CODE_TYPE, kScratchRegister);
1822 __ j(not_equal, &runtime);
1823
1824 // rax: subject string
1825 // rdi: encoding of subject string (1 if ascii, 0 if two_byte);
1826 // r11: code
1827 // Load used arguments before starting to push arguments for call to native
1828 // RegExp code to avoid handling changing stack height.
1829 __ SmiToInteger64(rbx, Operand(rsp, kPreviousIndexOffset));
1830
1831 // rax: subject string
1832 // rbx: previous index
1833 // rdi: encoding of subject string (1 if ascii 0 if two_byte);
1834 // r11: code
1835 // All checks done. Now push arguments for native regexp code.
1836 __ IncrementCounter(&Counters::regexp_entry_native, 1);
1837
1838 // rsi holds the context and is used to pass a parameter on Linux (where
1839 __ push(rsi);
1840
1841 static const int kRegExpExecuteArguments = 7;
1842 __ PrepareCallCFunction(kRegExpExecuteArguments);
1843 int argument_slots_on_stack =
1844 masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
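// For reference, the seven arguments set up below are, in order:
// 1: subject string, 2: previous index, 3: start of string data,
// 4: end of string data, 5: static offsets vector, 6: high end of the
// backtracking stack, 7: a flag marking this as a direct call from
// JavaScript.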
1845
1846 // Argument 7: Indicate that this is a direct call from JavaScript.
1847 __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize),
1848 Immediate(1));
1849
1850 // Argument 6: Start (high end) of backtracking stack memory area.
1851 __ movq(kScratchRegister, address_of_regexp_stack_memory_address);
1852 __ movq(r9, Operand(kScratchRegister, 0));
1853 __ movq(kScratchRegister, address_of_regexp_stack_memory_size);
1854 __ addq(r9, Operand(kScratchRegister, 0));
1855 // Argument 6 passed in r9 on Linux and on the stack on Windows.
1856#ifdef _WIN64
1857 __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kPointerSize), r9);
1858#endif
1859
1860 // Argument 5: static offsets vector buffer.
1861 __ movq(r8, ExternalReference::address_of_static_offsets_vector());
1862 // Argument 5 passed in r8 on Linux and on the stack on Windows.
1863#ifdef _WIN64
1864 __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kPointerSize), r8);
1865#endif
1866
1867 // First four arguments are passed in registers on both Linux and Windows.
1868#ifdef _WIN64
1869 Register arg4 = r9;
1870 Register arg3 = r8;
1871 Register arg2 = rdx;
1872 Register arg1 = rcx;
1873#else
1874 Register arg4 = rcx;
1875 Register arg3 = rdx;
1876 Register arg2 = rsi;
1877 Register arg1 = rdi;
1878#endif
1879
1880 // Keep track of aliasing between argX defined above and the registers used.
1881 // rax: subject string
1882 // rbx: previous index
1883 // rdi: encoding of subject string (1 if ascii 0 if two_byte);
1884 // r11: code
1885
1886 // Argument 4: End of string data
1887 // Argument 3: Start of string data
1888 NearLabel setup_two_byte, setup_rest;
1889 __ testb(rdi, rdi);
1890 __ j(zero, &setup_two_byte);
1891 __ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset));
1892 __ lea(arg4, FieldOperand(rax, rdi, times_1, SeqAsciiString::kHeaderSize));
1893 __ lea(arg3, FieldOperand(rax, rbx, times_1, SeqAsciiString::kHeaderSize));
1894 __ jmp(&setup_rest);
1895 __ bind(&setup_two_byte);
1896 __ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset));
1897 __ lea(arg4, FieldOperand(rax, rdi, times_2, SeqTwoByteString::kHeaderSize));
1898 __ lea(arg3, FieldOperand(rax, rbx, times_2, SeqTwoByteString::kHeaderSize));
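// In both branches the pointers are computed as
//   string + header size + index * character size
// (times_1 for ASCII, times_2 for two-byte), via FieldOperand, which
// also subtracts the heap object tag: arg3 is the first character to
// scan and arg4 is the address just past the end of the string.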
1899
1900 __ bind(&setup_rest);
1901 // Argument 2: Previous index.
1902 __ movq(arg2, rbx);
1903
1904 // Argument 1: Subject string.
1905 __ movq(arg1, rax);
1906
1907 // Locate the code entry and call it.
1908 __ addq(r11, Immediate(Code::kHeaderSize - kHeapObjectTag));
1909 __ CallCFunction(r11, kRegExpExecuteArguments);
1910
1911 // Restore rsi, which was saved above because the call may clobber it.
1912 __ pop(rsi);
1913
1914 // Check the result.
1915 NearLabel success;
1916 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::SUCCESS));
1917 __ j(equal, &success);
1918 NearLabel failure;
1919 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE));
1920 __ j(equal, &failure);
1921 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION));
1922 // If not exception it can only be retry. Handle that in the runtime system.
1923 __ j(not_equal, &runtime);
1924 // The result must now be exception. If there is no pending exception yet,
1925 // a stack overflow (on the backtrack stack) was detected in the RegExp
1926 // code but the exception has not been created yet. Handle that in the
1927 // runtime system. TODO(592): Rerun the RegExp to get the stack overflow.
1928 ExternalReference pending_exception_address(Top::k_pending_exception_address);
1929 __ movq(kScratchRegister, pending_exception_address);
1930 __ Cmp(Operand(kScratchRegister, 0), Factory::the_hole_value());
1931 __ j(equal, &runtime);
1932 __ bind(&failure);
1933 // For failure and exception return null.
1934 __ Move(rax, Factory::null_value());
1935 __ ret(4 * kPointerSize);
1936
1937 // Load RegExp data.
1938 __ bind(&success);
1939 __ movq(rax, Operand(rsp, kJSRegExpOffset));
1940 __ movq(rcx, FieldOperand(rax, JSRegExp::kDataOffset));
1941 __ SmiToInteger32(rax,
1942 FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
1943 // Calculate number of capture registers (number_of_captures + 1) * 2.
1944 __ leal(rdx, Operand(rax, rax, times_1, 2));
1945
1946 // rdx: Number of capture registers
1947 // Load last_match_info which is still known to be a fast case JSArray.
1948 __ movq(rax, Operand(rsp, kLastMatchInfoOffset));
1949 __ movq(rbx, FieldOperand(rax, JSArray::kElementsOffset));
1950
1951 // rbx: last_match_info backing store (FixedArray)
1952 // rdx: number of capture registers
1953 // Store the capture count.
1954 __ Integer32ToSmi(kScratchRegister, rdx);
1955 __ movq(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset),
1956 kScratchRegister);
1957 // Store last subject and last input.
1958 __ movq(rax, Operand(rsp, kSubjectOffset));
1959 __ movq(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax);
1960 __ movq(rcx, rbx);
1961 __ RecordWrite(rcx, RegExpImpl::kLastSubjectOffset, rax, rdi);
1962 __ movq(rax, Operand(rsp, kSubjectOffset));
1963 __ movq(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax);
1964 __ movq(rcx, rbx);
1965 __ RecordWrite(rcx, RegExpImpl::kLastInputOffset, rax, rdi);
1966
1967 // Get the static offsets vector filled by the native regexp code.
1968 __ movq(rcx, ExternalReference::address_of_static_offsets_vector());
1969
1970 // rbx: last_match_info backing store (FixedArray)
1971 // rcx: offsets vector
1972 // rdx: number of capture registers
1973 NearLabel next_capture, done;
1974 // Capture register counter starts from number of capture registers and
1975 // counts down until wrapping after zero.
1976 __ bind(&next_capture);
1977 __ subq(rdx, Immediate(1));
1978 __ j(negative, &done);
1979 // Read the value from the static offsets vector buffer and make it a smi.
1980 __ movl(rdi, Operand(rcx, rdx, times_int_size, 0));
1981 __ Integer32ToSmi(rdi, rdi);
1982 // Store the smi value in the last match info.
1983 __ movq(FieldOperand(rbx,
1984 rdx,
1985 times_pointer_size,
1986 RegExpImpl::kFirstCaptureOffset),
1987 rdi);
1988 __ jmp(&next_capture);
1989 __ bind(&done);
1990
1991 // Return last match info.
1992 __ movq(rax, Operand(rsp, kLastMatchInfoOffset));
1993 __ ret(4 * kPointerSize);
1994
1995 // Do the runtime call to execute the regexp.
1996 __ bind(&runtime);
1997 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
1998#endif // V8_INTERPRETED_REGEXP
1999}
2000
2001
2002void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
2003 Register object,
2004 Register result,
2005 Register scratch1,
2006 Register scratch2,
2007 bool object_is_smi,
2008 Label* not_found) {
2009 // Use of registers. Register result is used as a temporary.
2010 Register number_string_cache = result;
2011 Register mask = scratch1;
2012 Register scratch = scratch2;
2013
2014 // Load the number string cache.
2015 __ LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);
2016
2017 // Make the hash mask from the length of the number string cache. It
2018 // contains two elements (number and string) for each cache entry.
2019 __ SmiToInteger32(
2020 mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
2021 __ shrl(mask, Immediate(1));
2022 __ subq(mask, Immediate(1)); // Make mask.
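// Example: a cache FixedArray of length 128 holds 64 (number, string)
// pairs, so mask becomes 128 / 2 - 1 = 63 and hash values are reduced
// modulo the (power-of-two) number of entries.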
2023
2024 // Calculate the entry in the number string cache. The hash value in the
2025 // number string cache for smis is just the smi value, and the hash for
2026 // doubles is the xor of the upper and lower words. See
2027 // Heap::GetNumberStringCache.
2028 Label is_smi;
2029 Label load_result_from_cache;
2030 if (!object_is_smi) {
2031 __ JumpIfSmi(object, &is_smi);
2032 __ CheckMap(object, Factory::heap_number_map(), not_found, true);
2033
2034 STATIC_ASSERT(8 == kDoubleSize);
2035 __ movl(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
2036 __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset));
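// That is, the hash of a heap number is the xor of the upper and lower
// 32 bits of its IEEE-754 representation: e.g. 1.5 has the bit pattern
// 0x3FF8000000000000 and hashes to 0x3FF80000 before masking.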
2037 GenerateConvertHashCodeToIndex(masm, scratch, mask);
2038
2039 Register index = scratch;
2040 Register probe = mask;
2041 __ movq(probe,
2042 FieldOperand(number_string_cache,
2043 index,
2044 times_1,
2045 FixedArray::kHeaderSize));
2046 __ JumpIfSmi(probe, not_found);
2047 ASSERT(CpuFeatures::IsSupported(SSE2));
2048 CpuFeatures::Scope fscope(SSE2);
2049 __ movsd(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
2050 __ movsd(xmm1, FieldOperand(probe, HeapNumber::kValueOffset));
2051 __ ucomisd(xmm0, xmm1);
2052 __ j(parity_even, not_found); // Bail out if NaN is involved.
2053 __ j(not_equal, not_found); // The cache did not contain this value.
2054 __ jmp(&load_result_from_cache);
2055 }
2056
2057 __ bind(&is_smi);
2058 __ SmiToInteger32(scratch, object);
2059 GenerateConvertHashCodeToIndex(masm, scratch, mask);
2060
2061 Register index = scratch;
2062 // Check if the entry is the smi we are looking for.
2063 __ cmpq(object,
2064 FieldOperand(number_string_cache,
2065 index,
2066 times_1,
2067 FixedArray::kHeaderSize));
2068 __ j(not_equal, not_found);
2069
2070 // Get the result from the cache.
2071 __ bind(&load_result_from_cache);
2072 __ movq(result,
2073 FieldOperand(number_string_cache,
2074 index,
2075 times_1,
2076 FixedArray::kHeaderSize + kPointerSize));
2077 __ IncrementCounter(&Counters::number_to_string_native, 1);
2078}
2079
2080
2081void NumberToStringStub::GenerateConvertHashCodeToIndex(MacroAssembler* masm,
2082 Register hash,
2083 Register mask) {
2084 __ and_(hash, mask);
2085 // Each entry in the string cache consists of two pointer-sized fields,
2086 // but the times_twice_pointer_size (multiply by 16) scale factor is
2087 // not supported by the addressing modes on the x64 platform.
2088 // So we have to premultiply the entry index before the lookup.
2089 __ shl(hash, Immediate(kPointerSizeLog2 + 1));
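// Example: with hash & mask == 5 the entry starts at byte offset
// 5 << (kPointerSizeLog2 + 1) == 5 * 16 == 80, which the times_1
// operands in GenerateLookupNumberStringCache then index directly.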
2090}
2091
2092
2093void NumberToStringStub::Generate(MacroAssembler* masm) {
2094 Label runtime;
2095
2096 __ movq(rbx, Operand(rsp, kPointerSize));
2097
2098 // Generate code to lookup number in the number string cache.
2099 GenerateLookupNumberStringCache(masm, rbx, rax, r8, r9, false, &runtime);
2100 __ ret(1 * kPointerSize);
2101
2102 __ bind(&runtime);
2103 // Handle number to string in the runtime system if not found in the cache.
2104 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);
2105}
2106
2107
2108static int NegativeComparisonResult(Condition cc) {
2109 ASSERT(cc != equal);
2110 ASSERT((cc == less) || (cc == less_equal)
2111 || (cc == greater) || (cc == greater_equal));
2112 return (cc == greater || cc == greater_equal) ? LESS : GREATER;
2113}
2114
2115
2116void CompareStub::Generate(MacroAssembler* masm) {
2117 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
2118
2119 Label check_unequal_objects, done;
2120
2121 // Compare two smis if required.
2122 if (include_smi_compare_) {
2123 Label non_smi, smi_done;
2124 __ JumpIfNotBothSmi(rax, rdx, &non_smi);
2125 __ subq(rdx, rax);
2126 __ j(no_overflow, &smi_done);
2127 __ not_(rdx); // Correct sign in case of overflow. rdx cannot be 0 here.
2128 __ bind(&smi_done);
2129 __ movq(rax, rdx);
2130 __ ret(0);
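// Note on the smi path above: when the subtraction overflows, the sign
// of rdx is the opposite of the true difference. not_ flips the sign bit
// without the INT64_MIN hazard of neg, and since a difference of two
// smis has its low 32 bits clear on x64, NOT can never produce zero.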
2131 __ bind(&non_smi);
2132 } else if (FLAG_debug_code) {
2133 Label ok;
2134 __ JumpIfNotSmi(rdx, &ok);
2135 __ JumpIfNotSmi(rax, &ok);
2136 __ Abort("CompareStub: smi operands");
2137 __ bind(&ok);
2138 }
2139
2140 // The compare stub returns a positive, negative, or zero 64-bit integer
2141 // value in rax, corresponding to result of comparing the two inputs.
2142 // NOTICE! This code is only reached after a smi-fast-case check, so
2143 // it is certain that at least one operand isn't a smi.
2144
2145 // Two identical objects are equal unless they are both NaN or undefined.
2146 {
2147 NearLabel not_identical;
2148 __ cmpq(rax, rdx);
2149 __ j(not_equal, &not_identical);
2150
2151 if (cc_ != equal) {
2152 // Check for undefined. undefined OP undefined is false even though
2153 // undefined == undefined.
2154 NearLabel check_for_nan;
2155 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
2156 __ j(not_equal, &check_for_nan);
2157 __ Set(rax, NegativeComparisonResult(cc_));
2158 __ ret(0);
2159 __ bind(&check_for_nan);
2160 }
2161
2162 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
2163 // so we do the second best thing - test it ourselves.
2164 // Note: if cc_ != equal, never_nan_nan_ is not used.
2165 // We cannot set rax to EQUAL until just before return because
2166 // rax must be unchanged on jump to not_identical.
2167
2168 if (never_nan_nan_ && (cc_ == equal)) {
2169 __ Set(rax, EQUAL);
2170 __ ret(0);
2171 } else {
2172 NearLabel heap_number;
2173 // If it's not a heap number, then return equal for (in)equality operator.
2174 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2175 Factory::heap_number_map());
2176 __ j(equal, &heap_number);
2177 if (cc_ != equal) {
2178 // Call runtime on identical JSObjects. Otherwise return equal.
2179 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
2180 __ j(above_equal, &not_identical);
2181 }
2182 __ Set(rax, EQUAL);
2183 __ ret(0);
2184
2185 __ bind(&heap_number);
2186 // It is a heap number, so return equal if it's not NaN.
2187 // For NaN, return 1 for every condition except greater and
2188 // greater-equal. Return -1 for them, so the comparison yields
2189 // false for all conditions except not-equal.
2190 __ Set(rax, EQUAL);
2191 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
2192 __ ucomisd(xmm0, xmm0);
2193 __ setcc(parity_even, rax);
2194 // rax is 0 for equal non-NaN heapnumbers, 1 for NaNs.
2195 if (cc_ == greater_equal || cc_ == greater) {
2196 __ neg(rax);
2197 }
2198 __ ret(0);
2199 }
2200
2201 __ bind(&not_identical);
2202 }
2203
2204 if (cc_ == equal) { // Both strict and non-strict.
2205 Label slow; // Fallthrough label.
2206
2207 // If we're doing a strict equality comparison, we don't have to do
2208 // type conversion, so we generate code to do fast comparison for objects
2209 // and oddballs. Non-smi numbers and strings still go through the usual
2210 // slow-case code.
2211 if (strict_) {
2212 // If either is a Smi (we know that not both are), then they can only
2213 // be equal if the other is a HeapNumber. If so, use the slow case.
2214 {
2215 Label not_smis;
2216 __ SelectNonSmi(rbx, rax, rdx, &not_smis);
2217
2218 // Check if the non-smi operand is a heap number.
2219 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
2220 Factory::heap_number_map());
2221 // If heap number, handle it in the slow case.
2222 __ j(equal, &slow);
2223 // Return non-equal. ebx (the lower half of rbx) is not zero.
2224 __ movq(rax, rbx);
2225 __ ret(0);
2226
2227 __ bind(&not_smis);
2228 }
2229
2230 // If either operand is a JSObject or an oddball value, then they are not
2231 // equal since their pointers are different.
2232 // There is no test for undetectability in strict equality.
2233
2234 // If the first object is a JS object, we have done pointer comparison.
2235 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
2236 NearLabel first_non_object;
2237 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
2238 __ j(below, &first_non_object);
2239 // Return non-zero (eax (not rax) is not zero)
2240 Label return_not_equal;
2241 STATIC_ASSERT(kHeapObjectTag != 0);
2242 __ bind(&return_not_equal);
2243 __ ret(0);
2244
2245 __ bind(&first_non_object);
2246 // Check for oddballs: true, false, null, undefined.
2247 __ CmpInstanceType(rcx, ODDBALL_TYPE);
2248 __ j(equal, &return_not_equal);
2249
2250 __ CmpObjectType(rdx, FIRST_JS_OBJECT_TYPE, rcx);
2251 __ j(above_equal, &return_not_equal);
2252
2253 // Check for oddballs: true, false, null, undefined.
2254 __ CmpInstanceType(rcx, ODDBALL_TYPE);
2255 __ j(equal, &return_not_equal);
2256
2257 // Fall through to the general case.
2258 }
2259 __ bind(&slow);
2260 }
2261
2262 // Generate the number comparison code.
2263 if (include_number_compare_) {
2264 Label non_number_comparison;
2265 NearLabel unordered;
2266 FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison);
2267 __ xorl(rax, rax);
2268 __ xorl(rcx, rcx);
2269 __ ucomisd(xmm0, xmm1);
2270
2271 // Don't base result on EFLAGS when a NaN is involved.
2272 __ j(parity_even, &unordered);
2273 // Return a result of -1, 0, or 1, based on EFLAGS.
2274 __ setcc(above, rax);
2275 __ setcc(below, rcx);
2276 __ subq(rax, rcx);
2277 __ ret(0);
2278
2279 // If one of the numbers was NaN, then the result is always false.
2280 // The cc is never not-equal.
2281 __ bind(&unordered);
2282 ASSERT(cc_ != not_equal);
2283 if (cc_ == less || cc_ == less_equal) {
2284 __ Set(rax, 1);
2285 } else {
2286 __ Set(rax, -1);
2287 }
2288 __ ret(0);
2289
2290 // The number comparison code did not provide a valid result.
2291 __ bind(&non_number_comparison);
2292 }
2293
2294 // Fast negative check for symbol-to-symbol equality.
2295 Label check_for_strings;
2296 if (cc_ == equal) {
2297 BranchIfNonSymbol(masm, &check_for_strings, rax, kScratchRegister);
2298 BranchIfNonSymbol(masm, &check_for_strings, rdx, kScratchRegister);
2299
2300 // We've already checked for object identity, so if both operands
2301 // are symbols they aren't equal. Register eax (not rax) already holds a
2302 // non-zero value, which indicates not equal, so just return.
2303 __ ret(0);
2304 }
2305
2306 __ bind(&check_for_strings);
2307
2308 __ JumpIfNotBothSequentialAsciiStrings(
2309 rdx, rax, rcx, rbx, &check_unequal_objects);
2310
2311 // Inline comparison of ascii strings.
2312 StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
2313 rdx,
2314 rax,
2315 rcx,
2316 rbx,
2317 rdi,
2318 r8);
2319
2320#ifdef DEBUG
2321 __ Abort("Unexpected fall-through from string comparison");
2322#endif
2323
2324 __ bind(&check_unequal_objects);
2325 if (cc_ == equal && !strict_) {
2326 // Not strict equality. Objects are unequal if
2327 // they are both JSObjects and not undetectable,
2328 // and their pointers are different.
2329 NearLabel not_both_objects, return_unequal;
2330 // At most one is a smi, so we can test for smi by adding the two.
2331 // A smi plus a heap object has the low bit set, a heap object plus
2332 // a heap object has the low bit clear.
2333 STATIC_ASSERT(kSmiTag == 0);
2334 STATIC_ASSERT(kSmiTagMask == 1);
2335 __ lea(rcx, Operand(rax, rdx, times_1, 0));
2336 __ testb(rcx, Immediate(kSmiTagMask));
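// Bit 0 of the sum is the xor of the two tag bits: heap object plus
// heap object gives 1 + 1 = 0b10 (low bit clear), while a smi plus a
// heap object gives 0 + 1 (low bit set), so one testb classifies the
// pair.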
2337 __ j(not_zero, &not_both_objects);
2338 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rbx);
2339 __ j(below, &not_both_objects);
2340 __ CmpObjectType(rdx, FIRST_JS_OBJECT_TYPE, rcx);
2341 __ j(below, &not_both_objects);
2342 __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2343 Immediate(1 << Map::kIsUndetectable));
2344 __ j(zero, &return_unequal);
2345 __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
2346 Immediate(1 << Map::kIsUndetectable));
2347 __ j(zero, &return_unequal);
2348 // The objects are both undetectable, so they both compare as the value
2349 // undefined, and are equal.
2350 __ Set(rax, EQUAL);
2351 __ bind(&return_unequal);
2352 // Return non-equal by returning the non-zero object pointer in eax,
2353 // or return equal if we fell through to here.
2354 __ ret(0);
2355 __ bind(&not_both_objects);
2356 }
2357
2358 // Push arguments below the return address to prepare jump to builtin.
2359 __ pop(rcx);
2360 __ push(rdx);
2361 __ push(rax);
2362
2363 // Figure out which native to call and setup the arguments.
2364 Builtins::JavaScript builtin;
2365 if (cc_ == equal) {
2366 builtin = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
2367 } else {
2368 builtin = Builtins::COMPARE;
2369 __ Push(Smi::FromInt(NegativeComparisonResult(cc_)));
2370 }
2371
2372 // Restore return address on the stack.
2373 __ push(rcx);
2374
2375 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
2376 // tagged as a small integer.
2377 __ InvokeBuiltin(builtin, JUMP_FUNCTION);
2378}
2379
2380
2381void CompareStub::BranchIfNonSymbol(MacroAssembler* masm,
2382 Label* label,
2383 Register object,
2384 Register scratch) {
2385 __ JumpIfSmi(object, label);
2386 __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset));
2387 __ movzxbq(scratch,
2388 FieldOperand(scratch, Map::kInstanceTypeOffset));
2389 // Ensure that no non-strings have the symbol bit set.
2390 STATIC_ASSERT(LAST_TYPE < kNotStringTag + kIsSymbolMask);
2391 STATIC_ASSERT(kSymbolTag != 0);
2392 __ testb(scratch, Immediate(kIsSymbolMask));
2393 __ j(zero, label);
2394}
2395
2396
2397void StackCheckStub::Generate(MacroAssembler* masm) {
2398 __ TailCallRuntime(Runtime::kStackGuard, 0, 1);
2399}
2400
2401
2402void CallFunctionStub::Generate(MacroAssembler* masm) {
2403 Label slow;
2404
2405 // If the receiver might be a value (string, number or boolean) check for this
2406 // and box it if it is.
2407 if (ReceiverMightBeValue()) {
2408 // Get the receiver from the stack.
2409 // +1 ~ return address
2410 Label receiver_is_value, receiver_is_js_object;
2411 __ movq(rax, Operand(rsp, (argc_ + 1) * kPointerSize));
2412
2413 // Check if receiver is a smi (which is a number value).
2414 __ JumpIfSmi(rax, &receiver_is_value);
2415
2416 // Check if the receiver is a valid JS object.
2417 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rdi);
2418 __ j(above_equal, &receiver_is_js_object);
2419
2420 // Call the runtime to box the value.
2421 __ bind(&receiver_is_value);
2422 __ EnterInternalFrame();
2423 __ push(rax);
2424 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
2425 __ LeaveInternalFrame();
2426 __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rax);
2427
2428 __ bind(&receiver_is_js_object);
2429 }
2430
2431 // Get the function to call from the stack.
2432 // +2 ~ receiver, return address
2433 __ movq(rdi, Operand(rsp, (argc_ + 2) * kPointerSize));
2434
2435 // Check that the function really is a JavaScript function.
2436 __ JumpIfSmi(rdi, &slow);
2437 // Goto slow case if we do not have a function.
2438 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
2439 __ j(not_equal, &slow);
2440
2441 // Fast-case: Just invoke the function.
2442 ParameterCount actual(argc_);
2443 __ InvokeFunction(rdi, actual, JUMP_FUNCTION);
2444
2445 // Slow-case: Non-function called.
2446 __ bind(&slow);
2447 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
2448 // of the original receiver from the call site).
2449 __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rdi);
2450 __ Set(rax, argc_);
2451 __ Set(rbx, 0);
2452 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
2453 Handle<Code> adaptor(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
2454 __ Jump(adaptor, RelocInfo::CODE_TARGET);
2455}
2456
2457
2458void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
2459 // Check that the stack contains the next handler, frame pointer, state
2460 // and return address, in that order.
2461 STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
2462 StackHandlerConstants::kStateOffset);
2463 STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
2464 StackHandlerConstants::kPCOffset);
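// The handler record on the stack is laid out, from lower to higher
// addresses, as [next handler, frame pointer, state, return address];
// the pops below consume it in exactly that order.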
2465
2466 ExternalReference handler_address(Top::k_handler_address);
2467 __ movq(kScratchRegister, handler_address);
2468 __ movq(rsp, Operand(kScratchRegister, 0));
2469 // get next in chain
2470 __ pop(rcx);
2471 __ movq(Operand(kScratchRegister, 0), rcx);
2472 __ pop(rbp); // pop frame pointer
2473 __ pop(rdx); // remove state
2474
2475 // Before returning we restore the context from the frame pointer if not NULL.
2476 // The frame pointer is NULL in the exception handler of a JS entry frame.
2477 __ xor_(rsi, rsi); // tentatively set context pointer to NULL
2478 NearLabel skip;
2479 __ cmpq(rbp, Immediate(0));
2480 __ j(equal, &skip);
2481 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2482 __ bind(&skip);
2483 __ ret(0);
2484}
2485
2486
2487void CEntryStub::GenerateCore(MacroAssembler* masm,
2488 Label* throw_normal_exception,
2489 Label* throw_termination_exception,
2490 Label* throw_out_of_memory_exception,
2491 bool do_gc,
2492 bool always_allocate_scope,
2493 int /* alignment_skew */) {
2494 // rax: result parameter for PerformGC, if any.
2495 // rbx: pointer to C function (C callee-saved).
2496 // rbp: frame pointer (restored after C call).
2497 // rsp: stack pointer (restored after C call).
2498 // r14: number of arguments including receiver (C callee-saved).
2499 // r12: pointer to the first argument (C callee-saved).
2500 // This pointer is reused in LeaveExitFrame(), so it is stored in a
2501 // callee-saved register.
2502
2503 // Simple results returned in rax (both AMD64 and Win64 calling conventions).
2504 // Complex results must be written to address passed as first argument.
2505 // AMD64 calling convention: a struct of two pointers in rax+rdx
2506
2507 // Check stack alignment.
2508 if (FLAG_debug_code) {
2509 __ CheckStackAlignment();
2510 }
2511
2512 if (do_gc) {
2513 // Pass failure code returned from last attempt as first argument to
2514 // PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the
2515 // stack is known to be aligned. This function takes one argument which is
2516 // passed in register.
2517#ifdef _WIN64
2518 __ movq(rcx, rax);
2519#else // _WIN64
2520 __ movq(rdi, rax);
2521#endif
2522 __ movq(kScratchRegister,
2523 FUNCTION_ADDR(Runtime::PerformGC),
2524 RelocInfo::RUNTIME_ENTRY);
2525 __ call(kScratchRegister);
2526 }
2527
2528 ExternalReference scope_depth =
2529 ExternalReference::heap_always_allocate_scope_depth();
2530 if (always_allocate_scope) {
2531 __ movq(kScratchRegister, scope_depth);
2532 __ incl(Operand(kScratchRegister, 0));
2533 }
2534
2535 // Call C function.
2536#ifdef _WIN64
2537 // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9
2538 // Store Arguments object on stack, below the 4 WIN64 ABI parameter slots.
2539 __ movq(StackSpaceOperand(0), r14); // argc.
2540 __ movq(StackSpaceOperand(1), r12); // argv.
2541 if (result_size_ < 2) {
2542 // Pass a pointer to the Arguments object as the first argument.
2543 // Return result in single register (rax).
2544 __ lea(rcx, StackSpaceOperand(0));
2545 } else {
2546 ASSERT_EQ(2, result_size_);
2547 // Pass a pointer to the result location as the first argument.
2548 __ lea(rcx, StackSpaceOperand(2));
2549 // Pass a pointer to the Arguments object as the second argument.
2550 __ lea(rdx, StackSpaceOperand(0));
2551 }
2552
2553#else // _WIN64
2554 // GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9.
2555 __ movq(rdi, r14); // argc.
2556 __ movq(rsi, r12); // argv.
2557#endif
2558 __ call(rbx);
2559 // Result is in rax - do not destroy this register!
2560
2561 if (always_allocate_scope) {
2562 __ movq(kScratchRegister, scope_depth);
2563 __ decl(Operand(kScratchRegister, 0));
2564 }
2565
2566 // Check for failure result.
2567 Label failure_returned;
2568 STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
2569#ifdef _WIN64
2570 // If return value is on the stack, pop it to registers.
2571 if (result_size_ > 1) {
2572 ASSERT_EQ(2, result_size_);
2573 // Read result values stored on stack. Result is stored
2574 // above the four argument mirror slots and the two
2575 // Arguments object slots.
2576 __ movq(rax, Operand(rsp, 6 * kPointerSize));
2577 __ movq(rdx, Operand(rsp, 7 * kPointerSize));
2578 }
2579#endif
2580 __ lea(rcx, Operand(rax, 1));
2581 // Lower 2 bits of rcx are 0 iff rax has failure tag.
2582 __ testl(rcx, Immediate(kFailureTagMask));
2583 __ j(zero, &failure_returned);
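// Failure objects carry kFailureTag in their low two bits (0b11), so
// adding one clears both bits only for failures; smis (low bits 00) and
// heap objects (low bits 01) keep at least one bit set after the
// increment.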
2584
2585 // Exit the JavaScript to C++ exit frame.
2586 __ LeaveExitFrame();
2587 __ ret(0);
2588
2589 // Handling of failure.
2590 __ bind(&failure_returned);
2591
2592 NearLabel retry;
2593 // If the returned exception is RETRY_AFTER_GC, continue at the retry label.
2594 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
2595 __ testl(rax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
2596 __ j(zero, &retry);
2597
2598 // Special handling of out of memory exceptions.
2599 __ movq(kScratchRegister, Failure::OutOfMemoryException(), RelocInfo::NONE);
2600 __ cmpq(rax, kScratchRegister);
2601 __ j(equal, throw_out_of_memory_exception);
2602
2603 // Retrieve the pending exception and clear the variable.
2604 ExternalReference pending_exception_address(Top::k_pending_exception_address);
2605 __ movq(kScratchRegister, pending_exception_address);
2606 __ movq(rax, Operand(kScratchRegister, 0));
2607 __ movq(rdx, ExternalReference::the_hole_value_location());
2608 __ movq(rdx, Operand(rdx, 0));
2609 __ movq(Operand(kScratchRegister, 0), rdx);
2610
2611 // Special handling of termination exceptions which are uncatchable
2612 // by javascript code.
2613 __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
2614 __ j(equal, throw_termination_exception);
2615
2616 // Handle normal exception.
2617 __ jmp(throw_normal_exception);
2618
2619 // Retry.
2620 __ bind(&retry);
2621}
2622
2623
2624void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
2625 UncatchableExceptionType type) {
2626 // Fetch top stack handler.
2627 ExternalReference handler_address(Top::k_handler_address);
2628 __ movq(kScratchRegister, handler_address);
2629 __ movq(rsp, Operand(kScratchRegister, 0));
2630
2631 // Unwind the handlers until the ENTRY handler is found.
2632 NearLabel loop, done;
2633 __ bind(&loop);
2634 // Load the type of the current stack handler.
2635 const int kStateOffset = StackHandlerConstants::kStateOffset;
2636 __ cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY));
2637 __ j(equal, &done);
2638 // Fetch the next handler in the list.
2639 const int kNextOffset = StackHandlerConstants::kNextOffset;
2640 __ movq(rsp, Operand(rsp, kNextOffset));
2641 __ jmp(&loop);
2642 __ bind(&done);
2643
2644 // Set the top handler address to next handler past the current ENTRY handler.
2645 __ movq(kScratchRegister, handler_address);
2646 __ pop(Operand(kScratchRegister, 0));
2647
2648 if (type == OUT_OF_MEMORY) {
2649 // Set external caught exception to false.
2650 ExternalReference external_caught(Top::k_external_caught_exception_address);
2651 __ movq(rax, Immediate(false));
2652 __ store_rax(external_caught);
2653
2654 // Set pending exception and rax to out of memory exception.
2655 ExternalReference pending_exception(Top::k_pending_exception_address);
2656 __ movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE);
2657 __ store_rax(pending_exception);
2658 }
2659
2660 // Clear the context pointer.
2661 __ xor_(rsi, rsi);
2662
2663 // Restore registers from handler.
2664 STATIC_ASSERT(StackHandlerConstants::kNextOffset + kPointerSize ==
2665 StackHandlerConstants::kFPOffset);
2666 __ pop(rbp); // FP
2667 STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
2668 StackHandlerConstants::kStateOffset);
2669 __ pop(rdx); // State
2670
2671 STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
2672 StackHandlerConstants::kPCOffset);
2673 __ ret(0);
2674}
2675
2676
2677void CEntryStub::Generate(MacroAssembler* masm) {
2678 // rax: number of arguments including receiver
2679 // rbx: pointer to C function (C callee-saved)
2680 // rbp: frame pointer of calling JS frame (restored after C call)
2681 // rsp: stack pointer (restored after C call)
2682 // rsi: current context (restored)
2683
2684 // NOTE: Invocations of builtins may return failure objects
2685 // instead of a proper result. The builtin entry handles
2686 // this by performing a garbage collection and retrying the
2687 // builtin once.
2688
2689 // Enter the exit frame that transitions from JavaScript to C++.
2690#ifdef _WIN64
2691 int arg_stack_space = (result_size_ < 2 ? 2 : 4);
2692#else
2693 int arg_stack_space = 0;
2694#endif
2695 __ EnterExitFrame(arg_stack_space);
2696
2697 // rax: Holds the context at this point, but should not be used.
2698 // On entry to code generated by GenerateCore, it must hold
2699 // a failure result if the collect_garbage argument to GenerateCore
2700 // is true. This failure result can be the result of code
2701 // generated by a previous call to GenerateCore. The value
2702 // of rax is then passed to Runtime::PerformGC.
2703 // rbx: pointer to builtin function (C callee-saved).
2704 // rbp: frame pointer of exit frame (restored after C call).
2705 // rsp: stack pointer (restored after C call).
2706 // r14: number of arguments including receiver (C callee-saved).
2707 // r12: argv pointer (C callee-saved).
2708
2709 Label throw_normal_exception;
2710 Label throw_termination_exception;
2711 Label throw_out_of_memory_exception;
2712
2713 // Call into the runtime system.
2714 GenerateCore(masm,
2715 &throw_normal_exception,
2716 &throw_termination_exception,
2717 &throw_out_of_memory_exception,
2718 false,
2719 false);
2720
2721 // Do space-specific GC and retry runtime call.
2722 GenerateCore(masm,
2723 &throw_normal_exception,
2724 &throw_termination_exception,
2725 &throw_out_of_memory_exception,
2726 true,
2727 false);
2728
2729 // Do full GC and retry runtime call one final time.
2730 Failure* failure = Failure::InternalError();
2731 __ movq(rax, failure, RelocInfo::NONE);
2732 GenerateCore(masm,
2733 &throw_normal_exception,
2734 &throw_termination_exception,
2735 &throw_out_of_memory_exception,
2736 true,
2737 true);
2738
2739 __ bind(&throw_out_of_memory_exception);
2740 GenerateThrowUncatchable(masm, OUT_OF_MEMORY);
2741
2742 __ bind(&throw_termination_exception);
2743 GenerateThrowUncatchable(masm, TERMINATION);
2744
2745 __ bind(&throw_normal_exception);
2746 GenerateThrowTOS(masm);
2747}
2748
2749
2750void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
2751 Label invoke, exit;
2752#ifdef ENABLE_LOGGING_AND_PROFILING
2753 Label not_outermost_js, not_outermost_js_2;
2754#endif
2755
2756 // Setup frame.
2757 __ push(rbp);
2758 __ movq(rbp, rsp);
2759
2760 // Push the stack frame type marker twice.
2761 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
2762 // Scratch register is neither callee-save, nor an argument register on any
2763 // platform. It's free to use at this point.
2764 // Cannot use smi-register for loading yet.
2765 __ movq(kScratchRegister,
2766 reinterpret_cast<uint64_t>(Smi::FromInt(marker)),
2767 RelocInfo::NONE);
2768 __ push(kScratchRegister); // context slot
2769 __ push(kScratchRegister); // function slot
2770 // Save callee-saved registers (X64/Win64 calling conventions).
2771 __ push(r12);
2772 __ push(r13);
2773 __ push(r14);
2774 __ push(r15);
2775#ifdef _WIN64
2776 __ push(rdi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
2777 __ push(rsi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
2778#endif
2779 __ push(rbx);
2780 // TODO(X64): On Win64, if we ever use XMM6-XMM15, the low 64 bits are
2781 // callee save as well.
2782
2783 // Save copies of the top frame descriptor on the stack.
2784 ExternalReference c_entry_fp(Top::k_c_entry_fp_address);
2785 __ load_rax(c_entry_fp);
2786 __ push(rax);
2787
2788 // Set up the roots and smi constant registers.
2789 // Needs to be done before any further smi loads.
2790 ExternalReference roots_address = ExternalReference::roots_address();
2791 __ movq(kRootRegister, roots_address);
2792 __ InitializeSmiConstantRegister();
2793
2794#ifdef ENABLE_LOGGING_AND_PROFILING
2795 // If this is the outermost JS call, set js_entry_sp value.
2796 ExternalReference js_entry_sp(Top::k_js_entry_sp_address);
2797 __ load_rax(js_entry_sp);
2798 __ testq(rax, rax);
2799 __ j(not_zero, &not_outermost_js);
2800 __ movq(rax, rbp);
2801 __ store_rax(js_entry_sp);
2802 __ bind(&not_outermost_js);
2803#endif
2804
2805 // Call a faked try-block that does the invoke.
2806 __ call(&invoke);
2807
2808 // Caught exception: Store result (exception) in the pending
2809 // exception field in the JSEnv and return a failure sentinel.
2810 ExternalReference pending_exception(Top::k_pending_exception_address);
2811 __ store_rax(pending_exception);
2812 __ movq(rax, Failure::Exception(), RelocInfo::NONE);
2813 __ jmp(&exit);
2814
2815 // Invoke: Link this frame into the handler chain.
2816 __ bind(&invoke);
2817 __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
2818
2819 // Clear any pending exceptions.
2820 __ load_rax(ExternalReference::the_hole_value_location());
2821 __ store_rax(pending_exception);
2822
2823 // Fake a receiver (NULL).
2824 __ push(Immediate(0)); // receiver
2825
2826 // Invoke the function by calling through JS entry trampoline
2827 // builtin and pop the faked function when we return. We load the address
2828 // from an external reference instead of inlining the call target address
2829 // directly in the code, because the builtin stubs may not have been
2830 // generated yet at the time this code is generated.
2831 if (is_construct) {
2832 ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline);
2833 __ load_rax(construct_entry);
2834 } else {
2835 ExternalReference entry(Builtins::JSEntryTrampoline);
2836 __ load_rax(entry);
2837 }
2838 __ lea(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
2839 __ call(kScratchRegister);
2840
2841 // Unlink this frame from the handler chain.
2842 __ movq(kScratchRegister, ExternalReference(Top::k_handler_address));
2843 __ pop(Operand(kScratchRegister, 0));
2844 // Pop next_sp.
2845 __ addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
2846
2847#ifdef ENABLE_LOGGING_AND_PROFILING
2848 // If the current RBP value is the same as the js_entry_sp value, it means
2849 // that the current function is the outermost.
2850 __ movq(kScratchRegister, js_entry_sp);
2851 __ cmpq(rbp, Operand(kScratchRegister, 0));
2852 __ j(not_equal, &not_outermost_js_2);
2853 __ movq(Operand(kScratchRegister, 0), Immediate(0));
2854 __ bind(&not_outermost_js_2);
2855#endif
2856
2857 // Restore the top frame descriptor from the stack.
2858 __ bind(&exit);
2859 __ movq(kScratchRegister, ExternalReference(Top::k_c_entry_fp_address));
2860 __ pop(Operand(kScratchRegister, 0));
2861
2862 // Restore callee-saved registers (X64 conventions).
2863 __ pop(rbx);
2864#ifdef _WIN64
2865 // Callee save in Win64 ABI, arguments/volatile in AMD64 ABI.
2866 __ pop(rsi);
2867 __ pop(rdi);
2868#endif
2869 __ pop(r15);
2870 __ pop(r14);
2871 __ pop(r13);
2872 __ pop(r12);
2873 __ addq(rsp, Immediate(2 * kPointerSize)); // remove markers
2874
2875 // Restore frame pointer and return.
2876 __ pop(rbp);
2877 __ ret(0);
2878}
2879
2880
2881void InstanceofStub::Generate(MacroAssembler* masm) {
2882 // Implements "value instanceof function" operator.
2883 // Expected input state:
2884 // rsp[0] : return address
2885 // rsp[8] : function pointer
2886 // rsp[16] : value
2887 // Returns a bitwise zero to indicate that the value
2888 // is an instance of the function and anything else to
2889 // indicate that the value is not an instance.
2890
2891 // Get the object - go slow case if it's a smi.
2892 Label slow;
2893 __ movq(rax, Operand(rsp, 2 * kPointerSize));
2894 __ JumpIfSmi(rax, &slow);
2895
2896 // Check that the left hand is a JS object. Leave its map in rax.
2897 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rax);
2898 __ j(below, &slow);
2899 __ CmpInstanceType(rax, LAST_JS_OBJECT_TYPE);
2900 __ j(above, &slow);
2901
2902 // Get the prototype of the function.
2903 __ movq(rdx, Operand(rsp, 1 * kPointerSize));
2904 // rdx is function, rax is map.
2905
2906 // Look up the function and the map in the instanceof cache.
2907 NearLabel miss;
2908 __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
2909 __ j(not_equal, &miss);
2910 __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex);
2911 __ j(not_equal, &miss);
2912 __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
2913 __ ret(2 * kPointerSize);
2914
2915 __ bind(&miss);
2916 __ TryGetFunctionPrototype(rdx, rbx, &slow);
2917
2918 // Check that the function prototype is a JS object.
2919 __ JumpIfSmi(rbx, &slow);
2920 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, kScratchRegister);
2921 __ j(below, &slow);
2922 __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE);
2923 __ j(above, &slow);
2924
2925 // Register mapping:
2926 // rax is object map.
2927 // rdx is function.
2928 // rbx is function prototype.
2929 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
2930 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex);
2931
2932 __ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset));
2933
2934 // Loop through the prototype chain looking for the function prototype.
2935 NearLabel loop, is_instance, is_not_instance;
2936 __ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex);
2937 __ bind(&loop);
2938 __ cmpq(rcx, rbx);
2939 __ j(equal, &is_instance);
2940 __ cmpq(rcx, kScratchRegister);
2941 // The code at is_not_instance assumes that kScratchRegister contains a
2942 // non-zero GCable value (the null object in this case).
2943 __ j(equal, &is_not_instance);
2944 __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
2945 __ movq(rcx, FieldOperand(rcx, Map::kPrototypeOffset));
2946 __ jmp(&loop);
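// The loop above is the assembly form of, roughly:
//   for (p = object_map->prototype; p != null; p = p->map->prototype)
//     if (p == function_prototype) return 0;   // is an instance
//   return non-zero;                           // is not an instance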
2947
2948 __ bind(&is_instance);
2949 __ xorl(rax, rax);
2950 // Store bitwise zero in the cache. This is a Smi in GC terms.
2951 STATIC_ASSERT(kSmiTag == 0);
2952 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
2953 __ ret(2 * kPointerSize);
2954
2955 __ bind(&is_not_instance);
2956 // We have to store a non-zero value in the cache.
2957 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex);
2958 __ ret(2 * kPointerSize);
2959
2960 // Slow-case: Go through the JavaScript implementation.
2961 __ bind(&slow);
2962 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
2963}
2964
2965
2966int CompareStub::MinorKey() {
2967 // Encode the parameters in a unique 16 bit value. To avoid duplicate
2968 // stubs, the never-NaN-NaN condition is only taken into account if the
2969 // condition is equals.
2970 ASSERT(static_cast<unsigned>(cc_) < (1 << 12));
2971 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
2972 return ConditionField::encode(static_cast<unsigned>(cc_))
2973 | RegisterField::encode(false) // lhs_ and rhs_ are not used
2974 | StrictField::encode(strict_)
2975 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false)
2976 | IncludeNumberCompareField::encode(include_number_compare_)
2977 | IncludeSmiCompareField::encode(include_smi_compare_);
2978}
2979
2980
2981// Unfortunately you have to run without snapshots to see most of these
2982// names in the profile since most compare stubs end up in the snapshot.
2983const char* CompareStub::GetName() {
2984 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
2985
2986 if (name_ != NULL) return name_;
2987 const int kMaxNameLength = 100;
2988 name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
2989 if (name_ == NULL) return "OOM";
2990
2991 const char* cc_name;
2992 switch (cc_) {
2993 case less: cc_name = "LT"; break;
2994 case greater: cc_name = "GT"; break;
2995 case less_equal: cc_name = "LE"; break;
2996 case greater_equal: cc_name = "GE"; break;
2997 case equal: cc_name = "EQ"; break;
2998 case not_equal: cc_name = "NE"; break;
2999 default: cc_name = "UnknownCondition"; break;
3000 }
3001
3002 const char* strict_name = "";
3003 if (strict_ && (cc_ == equal || cc_ == not_equal)) {
3004 strict_name = "_STRICT";
3005 }
3006
3007 const char* never_nan_nan_name = "";
3008 if (never_nan_nan_ && (cc_ == equal || cc_ == not_equal)) {
3009 never_nan_nan_name = "_NO_NAN";
3010 }
3011
3012 const char* include_number_compare_name = "";
3013 if (!include_number_compare_) {
3014 include_number_compare_name = "_NO_NUMBER";
3015 }
3016
3017 const char* include_smi_compare_name = "";
3018 if (!include_smi_compare_) {
3019 include_smi_compare_name = "_NO_SMI";
3020 }
3021
3022 OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
3023 "CompareStub_%s%s%s%s%s",
3024 cc_name,
3025 strict_name,
3026 never_nan_nan_name,
3027 include_number_compare_name,
3028 include_smi_compare_name);
3029 return name_;
3030}


// -------------------------------------------------------------------------
// StringCharCodeAtGenerator

void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
  Label flat_string;
  Label ascii_string;
  Label got_char_code;

  // If the receiver is a smi, trigger the non-string case.
  __ JumpIfSmi(object_, receiver_not_string_);

  // Fetch the instance type of the receiver into the result register.
  __ movq(result_, FieldOperand(object_, HeapObject::kMapOffset));
  __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
  // If the receiver is not a string, trigger the non-string case.
  __ testb(result_, Immediate(kIsNotStringMask));
  __ j(not_zero, receiver_not_string_);

  // If the index is not a smi, trigger the non-smi case.
  __ JumpIfNotSmi(index_, &index_not_smi_);

  // Put the smi-tagged index into the scratch register.
  __ movq(scratch_, index_);
  __ bind(&got_smi_index_);

  // Check for index out of range.
  __ SmiCompare(scratch_, FieldOperand(object_, String::kLengthOffset));
  __ j(above_equal, index_out_of_range_);

  // We need special handling for non-flat strings.
  STATIC_ASSERT(kSeqStringTag == 0);
  __ testb(result_, Immediate(kStringRepresentationMask));
  __ j(zero, &flat_string);

  // Handle non-flat strings.
  __ testb(result_, Immediate(kIsConsStringMask));
  __ j(zero, &call_runtime_);

  // ConsString.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ CompareRoot(FieldOperand(object_, ConsString::kSecondOffset),
                 Heap::kEmptyStringRootIndex);
  __ j(not_equal, &call_runtime_);
  // Get the first of the two strings and load its instance type.
  __ movq(object_, FieldOperand(object_, ConsString::kFirstOffset));
  __ movq(result_, FieldOperand(object_, HeapObject::kMapOffset));
  __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
  // If the first cons component is also non-flat, then go to runtime.
  STATIC_ASSERT(kSeqStringTag == 0);
  __ testb(result_, Immediate(kStringRepresentationMask));
  __ j(not_zero, &call_runtime_);

  // Check for 1-byte or 2-byte string.
  __ bind(&flat_string);
  STATIC_ASSERT(kAsciiStringTag != 0);
  __ testb(result_, Immediate(kStringEncodingMask));
  __ j(not_zero, &ascii_string);

  // 2-byte string.
  // Load the 2-byte character code into the result register.
  __ SmiToInteger32(scratch_, scratch_);
  __ movzxwl(result_, FieldOperand(object_,
                                   scratch_, times_2,
                                   SeqTwoByteString::kHeaderSize));
  __ jmp(&got_char_code);

  // ASCII string.
  // Load the byte into the result register.
  __ bind(&ascii_string);
  __ SmiToInteger32(scratch_, scratch_);
  __ movzxbl(result_, FieldOperand(object_,
                                   scratch_, times_1,
                                   SeqAsciiString::kHeaderSize));
  __ bind(&got_char_code);
  __ Integer32ToSmi(result_, result_);
  __ bind(&exit_);
}
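
// A rough sketch of the fast path above in C-like pseudocode (illustrative
// only; the helper names are made up, not real V8 functions):
//
//   if (is_smi(object) || !is_string(object)) goto receiver_not_string;
//   if (!is_smi(index)) goto index_not_smi;
//   if (index >= object->length) goto index_out_of_range;
//   if (is_cons(object) && object->second == empty_string)
//     object = object->first;             // Unwrap an already-flat cons.
//   if (!is_sequential(object)) goto call_runtime;
//   result = is_ascii(object) ? object->chars8[index]
//                             : object->chars16[index];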


void StringCharCodeAtGenerator::GenerateSlow(
    MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
  __ Abort("Unexpected fallthrough to CharCodeAt slow case");

  // Index is not a smi.
  __ bind(&index_not_smi_);
  // If index is a heap number, try converting it to an integer.
  __ CheckMap(index_, Factory::heap_number_map(), index_not_number_, true);
  call_helper.BeforeCall(masm);
  __ push(object_);
  __ push(index_);
  __ push(index_);  // Consumed by runtime conversion function.
  if (index_flags_ == STRING_INDEX_IS_NUMBER) {
    __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
  } else {
    ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
    // NumberToSmi discards numbers that are not exact integers.
    __ CallRuntime(Runtime::kNumberToSmi, 1);
  }
  if (!scratch_.is(rax)) {
    // Save the conversion result before the pop instructions below
    // have a chance to overwrite it.
    __ movq(scratch_, rax);
  }
  __ pop(index_);
  __ pop(object_);
  // Reload the instance type.
  __ movq(result_, FieldOperand(object_, HeapObject::kMapOffset));
  __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
  call_helper.AfterCall(masm);
  // If index is still not a smi, it must be out of range.
  __ JumpIfNotSmi(scratch_, index_out_of_range_);
  // Otherwise, return to the fast path.
  __ jmp(&got_smi_index_);

  // Call the runtime. We get here when the receiver is a string and the
  // index is a number, but the code for getting the actual character
  // is too complex (e.g., when the string needs to be flattened).
  __ bind(&call_runtime_);
  call_helper.BeforeCall(masm);
  __ push(object_);
  __ push(index_);
  __ CallRuntime(Runtime::kStringCharCodeAt, 2);
  if (!result_.is(rax)) {
    __ movq(result_, rax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort("Unexpected fallthrough from CharCodeAt slow case");
}


// -------------------------------------------------------------------------
// StringCharFromCodeGenerator

void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
  // Fast case of Heap::LookupSingleCharacterStringFromCode.
  __ JumpIfNotSmi(code_, &slow_case_);
  __ SmiCompare(code_, Smi::FromInt(String::kMaxAsciiCharCode));
  __ j(above, &slow_case_);

  __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
  SmiIndex index = masm->SmiToIndex(kScratchRegister, code_, kPointerSizeLog2);
  __ movq(result_, FieldOperand(result_, index.reg, index.scale,
                                FixedArray::kHeaderSize));
  __ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
  __ j(equal, &slow_case_);
  __ bind(&exit_);
}
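
// In essence (illustrative pseudocode, not real V8 helpers):
//
//   if (!is_smi(code) || code > String::kMaxAsciiCharCode) goto slow_case;
//   result = single_character_string_cache[code];
//   if (result == undefined) goto slow_case;  // Cache miss.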


void StringCharFromCodeGenerator::GenerateSlow(
    MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
  __ Abort("Unexpected fallthrough to CharFromCode slow case");

  __ bind(&slow_case_);
  call_helper.BeforeCall(masm);
  __ push(code_);
  __ CallRuntime(Runtime::kCharFromCode, 1);
  if (!result_.is(rax)) {
    __ movq(result_, rax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort("Unexpected fallthrough from CharFromCode slow case");
}


// -------------------------------------------------------------------------
// StringCharAtGenerator

void StringCharAtGenerator::GenerateFast(MacroAssembler* masm) {
  char_code_at_generator_.GenerateFast(masm);
  char_from_code_generator_.GenerateFast(masm);
}


void StringCharAtGenerator::GenerateSlow(
    MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
  char_code_at_generator_.GenerateSlow(masm, call_helper);
  char_from_code_generator_.GenerateSlow(masm, call_helper);
}


void StringAddStub::Generate(MacroAssembler* masm) {
  Label string_add_runtime;

  // Load the two arguments.
  __ movq(rax, Operand(rsp, 2 * kPointerSize));  // First argument.
  __ movq(rdx, Operand(rsp, 1 * kPointerSize));  // Second argument.

  // Make sure that both arguments are strings if not known in advance.
  if (string_check_) {
    Condition is_smi;
    is_smi = masm->CheckSmi(rax);
    __ j(is_smi, &string_add_runtime);
    __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, r8);
    __ j(above_equal, &string_add_runtime);

    // First argument is a string; test the second.
    is_smi = masm->CheckSmi(rdx);
    __ j(is_smi, &string_add_runtime);
    __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, r9);
    __ j(above_equal, &string_add_runtime);
  }

  // Both arguments are strings.
  // rax: first string
  // rdx: second string
  // Check if either of the strings is empty; in that case return the other.
  NearLabel second_not_zero_length, both_not_zero_length;
  __ movq(rcx, FieldOperand(rdx, String::kLengthOffset));
  __ SmiTest(rcx);
  __ j(not_zero, &second_not_zero_length);
  // Second string is empty, result is first string which is already in rax.
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);
  __ bind(&second_not_zero_length);
  __ movq(rbx, FieldOperand(rax, String::kLengthOffset));
  __ SmiTest(rbx);
  __ j(not_zero, &both_not_zero_length);
  // First string is empty, result is second string which is in rdx.
  __ movq(rax, rdx);
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);

  // Both strings are non-empty.
  // rax: first string
  // rbx: length of first string
  // rcx: length of second string
  // rdx: second string
  // r8: map of first string if the string check was performed above
  // r9: map of second string if the string check was performed above
  Label string_add_flat_result, longer_than_two;
  __ bind(&both_not_zero_length);

  // If the arguments were known to be strings, the maps were not loaded
  // into r8 and r9 by the code above.
  if (!string_check_) {
    __ movq(r8, FieldOperand(rax, HeapObject::kMapOffset));
    __ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset));
  }
  // Get the instance types of the two strings as they will be needed soon.
  __ movzxbl(r8, FieldOperand(r8, Map::kInstanceTypeOffset));
  __ movzxbl(r9, FieldOperand(r9, Map::kInstanceTypeOffset));

  // Look at the length of the result of adding the two strings.
  STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue / 2);
  __ SmiAdd(rbx, rbx, rcx);
  // Use the runtime system when adding two one-character strings, as it
  // contains optimizations for this specific case using the symbol table.
  __ SmiCompare(rbx, Smi::FromInt(2));
  __ j(not_equal, &longer_than_two);

  // Check that both strings are non-external ASCII strings.
  __ JumpIfBothInstanceTypesAreNotSequentialAscii(r8, r9, rbx, rcx,
                                                  &string_add_runtime);

  // Get the two characters forming the new string.
  __ movzxbq(rbx, FieldOperand(rax, SeqAsciiString::kHeaderSize));
  __ movzxbq(rcx, FieldOperand(rdx, SeqAsciiString::kHeaderSize));

  // Try to look up the two-character string in the symbol table. If it is
  // not found, just allocate a new one.
  Label make_two_character_string, make_flat_ascii_string;
  StringHelper::GenerateTwoCharacterSymbolTableProbe(
      masm, rbx, rcx, r14, r11, rdi, r12, &make_two_character_string);
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);

  __ bind(&make_two_character_string);
  __ Set(rbx, 2);
  __ jmp(&make_flat_ascii_string);

  __ bind(&longer_than_two);
  // Check if the resulting string will be flat.
  __ SmiCompare(rbx, Smi::FromInt(String::kMinNonFlatLength));
  __ j(below, &string_add_flat_result);
  // Handle exceptionally long strings in the runtime system.
  STATIC_ASSERT((String::kMaxLength & 0x80000000) == 0);
  __ SmiCompare(rbx, Smi::FromInt(String::kMaxLength));
  __ j(above, &string_add_runtime);

  // If the result is not supposed to be flat, allocate a cons string object.
  // If both strings are ASCII the result is an ASCII cons string.
  // rax: first string
  // rbx: length of resulting flat string
  // rdx: second string
  // r8: instance type of first string
  // r9: instance type of second string
  Label non_ascii, allocated, ascii_data;
  __ movl(rcx, r8);
  __ and_(rcx, r9);
  STATIC_ASSERT(kStringEncodingMask == kAsciiStringTag);
  __ testl(rcx, Immediate(kAsciiStringTag));
  __ j(zero, &non_ascii);
  __ bind(&ascii_data);
  // Allocate an ASCII cons string.
  __ AllocateAsciiConsString(rcx, rdi, no_reg, &string_add_runtime);
  __ bind(&allocated);
  // Fill the fields of the cons string.
  __ movq(FieldOperand(rcx, ConsString::kLengthOffset), rbx);
  __ movq(FieldOperand(rcx, ConsString::kHashFieldOffset),
          Immediate(String::kEmptyHashField));
  __ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax);
  __ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx);
  __ movq(rax, rcx);
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);
  __ bind(&non_ascii);
  // At least one of the strings is two-byte. Check whether it happens
  // to contain only ASCII characters.
  // rcx: first instance type AND second instance type.
  // r8: first instance type.
  // r9: second instance type.
  __ testb(rcx, Immediate(kAsciiDataHintMask));
  __ j(not_zero, &ascii_data);
  __ xor_(r8, r9);
  STATIC_ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0);
  __ andb(r8, Immediate(kAsciiStringTag | kAsciiDataHintTag));
  __ cmpb(r8, Immediate(kAsciiStringTag | kAsciiDataHintTag));
  __ j(equal, &ascii_data);
  // Allocate a two-byte cons string.
  __ AllocateConsString(rcx, rdi, no_reg, &string_add_runtime);
  __ jmp(&allocated);

  // Handle creating a flat result. First check that both strings are not
  // external strings.
  // rax: first string
  // rbx: length of resulting flat string as smi
  // rdx: second string
  // r8: instance type of first string
  // r9: instance type of second string
  __ bind(&string_add_flat_result);
  __ SmiToInteger32(rbx, rbx);
  __ movl(rcx, r8);
  __ and_(rcx, Immediate(kStringRepresentationMask));
  __ cmpl(rcx, Immediate(kExternalStringTag));
  __ j(equal, &string_add_runtime);
  __ movl(rcx, r9);
  __ and_(rcx, Immediate(kStringRepresentationMask));
  __ cmpl(rcx, Immediate(kExternalStringTag));
  __ j(equal, &string_add_runtime);
  // Now check if both strings are ASCII strings.
  // rax: first string
  // rbx: length of resulting flat string
  // rdx: second string
  // r8: instance type of first string
  // r9: instance type of second string
  Label non_ascii_string_add_flat_result;
  STATIC_ASSERT(kStringEncodingMask == kAsciiStringTag);
  __ testl(r8, Immediate(kAsciiStringTag));
  __ j(zero, &non_ascii_string_add_flat_result);
  __ testl(r9, Immediate(kAsciiStringTag));
  __ j(zero, &string_add_runtime);

  __ bind(&make_flat_ascii_string);
  // Both strings are ASCII strings. As they are short they are both flat.
  __ AllocateAsciiString(rcx, rbx, rdi, r14, r11, &string_add_runtime);
  // rcx: result string
  __ movq(rbx, rcx);
  // Locate first character of result.
  __ addq(rcx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // Locate first character of first argument.
  __ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset));
  __ addq(rax, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // rax: first char of first argument
  // rbx: result string
  // rcx: first character of result
  // rdx: second string
  // rdi: length of first argument
  StringHelper::GenerateCopyCharacters(masm, rcx, rax, rdi, true);
  // Locate first character of second argument.
  __ SmiToInteger32(rdi, FieldOperand(rdx, String::kLengthOffset));
  __ addq(rdx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // rbx: result string
  // rcx: next character of result
  // rdx: first char of second argument
  // rdi: length of second argument
  StringHelper::GenerateCopyCharacters(masm, rcx, rdx, rdi, true);
  __ movq(rax, rbx);
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);

  // Handle creating a flat two-byte result.
  // rax: first string - known to be two-byte
  // rbx: length of resulting flat string
  // rdx: second string
  // r8: instance type of first string
  // r9: instance type of second string
  __ bind(&non_ascii_string_add_flat_result);
  __ and_(r9, Immediate(kAsciiStringTag));
  __ j(not_zero, &string_add_runtime);
  // Both strings are two-byte strings. As they are short they are both
  // flat.
  __ AllocateTwoByteString(rcx, rbx, rdi, r14, r11, &string_add_runtime);
  // rcx: result string
  __ movq(rbx, rcx);
  // Locate first character of result.
  __ addq(rcx, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // Locate first character of first argument.
  __ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset));
  __ addq(rax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // rax: first char of first argument
  // rbx: result string
  // rcx: first character of result
  // rdx: second string
  // rdi: length of first argument
  StringHelper::GenerateCopyCharacters(masm, rcx, rax, rdi, false);
  // Locate first character of second argument.
  __ SmiToInteger32(rdi, FieldOperand(rdx, String::kLengthOffset));
  __ addq(rdx, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // rbx: result string
  // rcx: next character of result
  // rdx: first char of second argument
  // rdi: length of second argument
  StringHelper::GenerateCopyCharacters(masm, rcx, rdx, rdi, false);
  __ movq(rax, rbx);
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);

  // Just jump to runtime to add the two strings.
  __ bind(&string_add_runtime);
  __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
}
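
// The overall strategy above, as illustrative pseudocode (helper names are
// made up, not real V8 functions):
//
//   if (a.length == 0) return b;
//   if (b.length == 0) return a;
//   if (a.length + b.length == 2) return lookup_or_make_two_char_string();
//   if (a.length + b.length >= String::kMinNonFlatLength)
//     return new ConsString(a, b);   // No character copying.
//   return flat_copy(a, b);          // Allocate and copy both halves.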


void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
                                          Register dest,
                                          Register src,
                                          Register count,
                                          bool ascii) {
  Label loop;
  __ bind(&loop);
  // This loop just copies one character at a time, as it is only used for
  // very short strings.
  if (ascii) {
    __ movb(kScratchRegister, Operand(src, 0));
    __ movb(Operand(dest, 0), kScratchRegister);
    __ incq(src);
    __ incq(dest);
  } else {
    __ movzxwl(kScratchRegister, Operand(src, 0));
    __ movw(Operand(dest, 0), kScratchRegister);
    __ addq(src, Immediate(2));
    __ addq(dest, Immediate(2));
  }
  __ decl(count);
  __ j(not_zero, &loop);
}


void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
                                             Register dest,
                                             Register src,
                                             Register count,
                                             bool ascii) {
  // Copy characters using rep movs of quadwords. Copy any remaining bytes
  // one at a time after running rep movs.
  // Count is a positive int32; dest and src are character pointers.
  ASSERT(dest.is(rdi));  // rep movs destination
  ASSERT(src.is(rsi));  // rep movs source
  ASSERT(count.is(rcx));  // rep movs count

  // Nothing to do for zero characters.
  NearLabel done;
  __ testl(count, count);
  __ j(zero, &done);

  // Make count the number of bytes to copy.
  if (!ascii) {
    STATIC_ASSERT(2 == sizeof(uc16));
    __ addl(count, count);
  }

  // Don't enter the rep movs if there are fewer than 8 bytes to copy.
  NearLabel last_bytes;
  __ testl(count, Immediate(~7));
  __ j(zero, &last_bytes);

  // Copy from rsi to rdi using the rep movs instruction.
  __ movl(kScratchRegister, count);
  __ shr(count, Immediate(3));  // Number of quadwords to copy.
  __ repmovsq();

  // Find number of bytes left.
  __ movl(count, kScratchRegister);
  __ and_(count, Immediate(7));

  // Check if there are more bytes to copy.
  __ bind(&last_bytes);
  __ testl(count, count);
  __ j(zero, &done);

  // Copy remaining characters.
  Label loop;
  __ bind(&loop);
  __ movb(kScratchRegister, Operand(src, 0));
  __ movb(Operand(dest, 0), kScratchRegister);
  __ incq(src);
  __ incq(dest);
  __ decl(count);
  __ j(not_zero, &loop);

  __ bind(&done);
}
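
// Illustrative C-level equivalent of the copy above:
//
//   size_t bytes = ascii ? count : count * 2;   // uc16 is two bytes.
//   size_t words = bytes >> 3;                  // Copied by rep movsq.
//   memcpy(dest, src, words * 8);
//   memcpy(dest + words * 8, src + words * 8, bytes & 7);  // Byte tail.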

void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
                                                        Register c1,
                                                        Register c2,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3,
                                                        Register scratch4,
                                                        Label* not_found) {
  // Register scratch3 is the general scratch register in this function.
  Register scratch = scratch3;

  // Make sure that both characters are not digits, as such strings have a
  // different hash algorithm. Don't try to look for these in the symbol
  // table.
  NearLabel not_array_index;
  __ leal(scratch, Operand(c1, -'0'));
  __ cmpl(scratch, Immediate(static_cast<int>('9' - '0')));
  __ j(above, &not_array_index);
  __ leal(scratch, Operand(c2, -'0'));
  __ cmpl(scratch, Immediate(static_cast<int>('9' - '0')));
  __ j(below_equal, not_found);

  __ bind(&not_array_index);
  // Calculate the two character string hash.
  Register hash = scratch1;
  GenerateHashInit(masm, hash, c1, scratch);
  GenerateHashAddCharacter(masm, hash, c2, scratch);
  GenerateHashGetHash(masm, hash, scratch);

  // Collect the two characters in a register.
  Register chars = c1;
  __ shl(c2, Immediate(kBitsPerByte));
  __ orl(chars, c2);

  // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
  // hash: hash of two character string.

  // Load the symbol table.
  Register symbol_table = c2;
  __ LoadRoot(symbol_table, Heap::kSymbolTableRootIndex);

  // Calculate capacity mask from the symbol table capacity.
  Register mask = scratch2;
  __ SmiToInteger32(mask,
                    FieldOperand(symbol_table, SymbolTable::kCapacityOffset));
  __ decl(mask);

  Register undefined = scratch4;
  __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);

  // Registers
  // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
  // hash: hash of two character string (32-bit int)
  // symbol_table: symbol table
  // mask: capacity mask (32-bit int)
  // undefined: undefined value
  // scratch: -

  // Perform a number of probes in the symbol table.
  static const int kProbes = 4;
  Label found_in_symbol_table;
  Label next_probe[kProbes];
  for (int i = 0; i < kProbes; i++) {
    // Calculate entry in symbol table.
    __ movl(scratch, hash);
    if (i > 0) {
      __ addl(scratch, Immediate(SymbolTable::GetProbeOffset(i)));
    }
    __ andl(scratch, mask);

    // Load the entry from the symbol table.
    Register candidate = scratch;  // Scratch register contains candidate.
    STATIC_ASSERT(SymbolTable::kEntrySize == 1);
    __ movq(candidate,
            FieldOperand(symbol_table,
                         scratch,
                         times_pointer_size,
                         SymbolTable::kElementsStartOffset));

    // If the entry is undefined no string with this hash can be found.
    __ cmpq(candidate, undefined);
    __ j(equal, not_found);

    // If the length is not 2 the string is not a candidate.
    __ SmiCompare(FieldOperand(candidate, String::kLengthOffset),
                  Smi::FromInt(2));
    __ j(not_equal, &next_probe[i]);

    // We use kScratchRegister as a temporary register on the assumption
    // that JumpIfInstanceTypeIsNotSequentialAscii does not use it
    // implicitly.
    Register temp = kScratchRegister;

    // Check that the candidate is a non-external ASCII string.
    __ movq(temp, FieldOperand(candidate, HeapObject::kMapOffset));
    __ movzxbl(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
    __ JumpIfInstanceTypeIsNotSequentialAscii(
        temp, temp, &next_probe[i]);

    // Check if the two characters match.
    __ movl(temp, FieldOperand(candidate, SeqAsciiString::kHeaderSize));
    __ andl(temp, Immediate(0x0000ffff));
    __ cmpl(chars, temp);
    __ j(equal, &found_in_symbol_table);
    __ bind(&next_probe[i]);
  }

  // No matching two character string found by probing.
  __ jmp(not_found);

  // Scratch register contains result when we fall through to here.
  Register result = scratch;
  __ bind(&found_in_symbol_table);
  if (!result.is(rax)) {
    __ movq(rax, result);
  }
}
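
// The probing scheme above, roughly (illustrative pseudocode):
//
//   for (int i = 0; i < kProbes; i++) {
//     entry = (hash + GetProbeOffset(i)) & (capacity - 1);
//     candidate = symbol_table[entry];
//     if (candidate == undefined) return not_found;
//     if (candidate.length == 2 && candidate is sequential ASCII &&
//         candidate's chars equal c1, c2) return candidate;
//   }
//   return not_found;  // All probes missed.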


void StringHelper::GenerateHashInit(MacroAssembler* masm,
                                    Register hash,
                                    Register character,
                                    Register scratch) {
  // hash = character + (character << 10);
  __ movl(hash, character);
  __ shll(hash, Immediate(10));
  __ addl(hash, character);
  // hash ^= hash >> 6;
  __ movl(scratch, hash);
  __ sarl(scratch, Immediate(6));
  __ xorl(hash, scratch);
}


void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
                                            Register hash,
                                            Register character,
                                            Register scratch) {
  // hash += character;
  __ addl(hash, character);
  // hash += hash << 10;
  __ movl(scratch, hash);
  __ shll(scratch, Immediate(10));
  __ addl(hash, scratch);
  // hash ^= hash >> 6;
  __ movl(scratch, hash);
  __ sarl(scratch, Immediate(6));
  __ xorl(hash, scratch);
}


void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
                                       Register hash,
                                       Register scratch) {
  // hash += hash << 3;
  __ leal(hash, Operand(hash, hash, times_8, 0));
  // hash ^= hash >> 11;
  __ movl(scratch, hash);
  __ sarl(scratch, Immediate(11));
  __ xorl(hash, scratch);
  // hash += hash << 15;
  __ movl(scratch, hash);
  __ shll(scratch, Immediate(15));
  __ addl(hash, scratch);

  // if (hash == 0) hash = 27;
  Label hash_not_zero;
  __ j(not_zero, &hash_not_zero);
  __ movl(hash, Immediate(27));
  __ bind(&hash_not_zero);
}
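
// Taken together, the three helpers above compute the runtime's string hash
// (a Jenkins one-at-a-time style hash). A plain C sketch of the
// two-character case:
//
//   uint32_t hash = c1 + (c1 << 10);
//   hash ^= hash >> 6;
//   hash += c2;
//   hash += hash << 10;
//   hash ^= hash >> 6;
//   hash += hash << 3;
//   hash ^= hash >> 11;
//   hash += hash << 15;
//   if (hash == 0) hash = 27;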

void SubStringStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  // rsp[0]: return address
  // rsp[8]: to
  // rsp[16]: from
  // rsp[24]: string

  const int kToOffset = 1 * kPointerSize;
  const int kFromOffset = kToOffset + kPointerSize;
  const int kStringOffset = kFromOffset + kPointerSize;
  const int kArgumentsSize = (kStringOffset + kPointerSize) - kToOffset;

  // Make sure the first argument is a string.
  __ movq(rax, Operand(rsp, kStringOffset));
  STATIC_ASSERT(kSmiTag == 0);
  __ testl(rax, Immediate(kSmiTagMask));
  __ j(zero, &runtime);
  Condition is_string = masm->IsObjectStringType(rax, rbx, rbx);
  __ j(NegateCondition(is_string), &runtime);

  // rax: string
  // rbx: instance type
  // Calculate the length of the sub string using the smi values.
  Label result_longer_than_two;
  __ movq(rcx, Operand(rsp, kToOffset));
  __ movq(rdx, Operand(rsp, kFromOffset));
  __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime);

  __ SmiSub(rcx, rcx, rdx);  // Overflow doesn't happen.
  __ cmpq(FieldOperand(rax, String::kLengthOffset), rcx);
  Label return_rax;
  __ j(equal, &return_rax);
  // Special handling of sub-strings of length 1 and 2. One character
  // strings are handled in the runtime system (looked up in the single
  // character cache). Two character strings are looked for in the symbol
  // cache.
  __ SmiToInteger32(rcx, rcx);
  __ cmpl(rcx, Immediate(2));
  __ j(greater, &result_longer_than_two);
  __ j(less, &runtime);

  // Sub string of length 2 requested.
  // rax: string
  // rbx: instance type
  // rcx: sub string length (value is 2)
  // rdx: from index (smi)
  __ JumpIfInstanceTypeIsNotSequentialAscii(rbx, rbx, &runtime);

  // Get the two characters forming the sub string.
  __ SmiToInteger32(rdx, rdx);  // From index is no longer a smi.
  __ movzxbq(rbx, FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize));
  __ movzxbq(rcx,
             FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize + 1));

  // Try to look up the two-character string in the symbol table.
  Label make_two_character_string;
  StringHelper::GenerateTwoCharacterSymbolTableProbe(
      masm, rbx, rcx, rax, rdx, rdi, r14, &make_two_character_string);
  __ ret(3 * kPointerSize);

  __ bind(&make_two_character_string);
  // Set up registers for allocating the two character string.
  __ movq(rax, Operand(rsp, kStringOffset));
  __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
  __ Set(rcx, 2);

  __ bind(&result_longer_than_two);

  // rax: string
  // rbx: instance type
  // rcx: result string length
  // Check for a flat ASCII string.
  Label non_ascii_flat;
  __ JumpIfInstanceTypeIsNotSequentialAscii(rbx, rbx, &non_ascii_flat);

  // Allocate the result.
  __ AllocateAsciiString(rax, rcx, rbx, rdx, rdi, &runtime);

  // rax: result string
  // rcx: result string length
  __ movq(rdx, rsi);  // rsi is used by the following code.
  // Locate first character of result.
  __ lea(rdi, FieldOperand(rax, SeqAsciiString::kHeaderSize));
  // Load string argument and locate character of sub string start.
  __ movq(rsi, Operand(rsp, kStringOffset));
  __ movq(rbx, Operand(rsp, kFromOffset));
  {
    SmiIndex smi_as_index = masm->SmiToIndex(rbx, rbx, times_1);
    __ lea(rsi, Operand(rsi, smi_as_index.reg, smi_as_index.scale,
                        SeqAsciiString::kHeaderSize - kHeapObjectTag));
  }

  // rax: result string
  // rcx: result length
  // rdx: original value of rsi
  // rdi: first character of result
  // rsi: character of sub string start
  StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, true);
  __ movq(rsi, rdx);  // Restore rsi.
  __ IncrementCounter(&Counters::sub_string_native, 1);
  __ ret(kArgumentsSize);

  __ bind(&non_ascii_flat);
  // rax: string
  // rbx: instance type & kStringRepresentationMask | kStringEncodingMask
  // rcx: result string length
  // Check for a sequential two-byte string.
  __ cmpb(rbx, Immediate(kSeqStringTag | kTwoByteStringTag));
  __ j(not_equal, &runtime);

  // Allocate the result.
  __ AllocateTwoByteString(rax, rcx, rbx, rdx, rdi, &runtime);

  // rax: result string
  // rcx: result string length
  __ movq(rdx, rsi);  // rsi is used by the following code.
  // Locate first character of result.
  __ lea(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize));
  // Load string argument and locate character of sub string start.
  __ movq(rsi, Operand(rsp, kStringOffset));
  __ movq(rbx, Operand(rsp, kFromOffset));
  {
    SmiIndex smi_as_index = masm->SmiToIndex(rbx, rbx, times_2);
    __ lea(rsi, Operand(rsi, smi_as_index.reg, smi_as_index.scale,
                        SeqAsciiString::kHeaderSize - kHeapObjectTag));
  }

  // rax: result string
  // rcx: result length
  // rdx: original value of rsi
  // rdi: first character of result
  // rsi: character of sub string start
  StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, false);
  __ movq(rsi, rdx);  // Restore rsi.

  __ bind(&return_rax);
  __ IncrementCounter(&Counters::sub_string_native, 1);
  __ ret(kArgumentsSize);

  // Just jump to runtime to create the sub string.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kSubString, 3, 1);
}


void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
                                                        Register left,
                                                        Register right,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3,
                                                        Register scratch4) {
  // Ensure that you can always subtract a string length from a non-negative
  // number (e.g. another length).
  STATIC_ASSERT(String::kMaxLength < 0x7fffffff);

  // Find minimum length and length difference.
  __ movq(scratch1, FieldOperand(left, String::kLengthOffset));
  __ movq(scratch4, scratch1);
  __ SmiSub(scratch4,
            scratch4,
            FieldOperand(right, String::kLengthOffset));
  // Register scratch4 now holds left.length - right.length.
  const Register length_difference = scratch4;
  NearLabel left_shorter;
  __ j(less, &left_shorter);
  // The right string isn't longer than the left one.
  // Get the right string's length by subtracting the (non-negative)
  // difference from the left string's length.
  __ SmiSub(scratch1, scratch1, length_difference);
  __ bind(&left_shorter);
  // Register scratch1 now holds Min(left.length, right.length).
  const Register min_length = scratch1;

  NearLabel compare_lengths;
  // If min-length is zero, go directly to comparing lengths.
  __ SmiTest(min_length);
  __ j(zero, &compare_lengths);

  __ SmiToInteger32(min_length, min_length);

  // Registers scratch2 and scratch3 are free.
  NearLabel result_not_equal;
  Label loop;
  {
    // Check characters 0 .. min_length - 1 in a loop.
    // Use scratch3 as the loop index, min_length as the limit and scratch2
    // for computation.
    const Register index = scratch3;
    __ movl(index, Immediate(0));  // Index into strings.
    __ bind(&loop);
    // Compare characters.
    // TODO(lrn): Could we load more than one character at a time?
    __ movb(scratch2, FieldOperand(left,
                                   index,
                                   times_1,
                                   SeqAsciiString::kHeaderSize));
    // Increment index and use a -1 displacement on the next load to give
    // the previous load extra time to complete.
    __ addl(index, Immediate(1));
    __ cmpb(scratch2, FieldOperand(right,
                                   index,
                                   times_1,
                                   SeqAsciiString::kHeaderSize - 1));
    __ j(not_equal, &result_not_equal);
    __ cmpl(index, min_length);
    __ j(not_equal, &loop);
  }
  // Completed loop without finding different characters.
  // Compare lengths (precomputed).
  __ bind(&compare_lengths);
  __ SmiTest(length_difference);
  __ j(not_zero, &result_not_equal);

  // Result is EQUAL.
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  NearLabel result_greater;
  __ bind(&result_not_equal);
  // Unequal comparison of left to right, either character or length.
  __ j(greater, &result_greater);

  // Result is LESS.
  __ Move(rax, Smi::FromInt(LESS));
  __ ret(0);

  // Result is GREATER.
  __ bind(&result_greater);
  __ Move(rax, Smi::FromInt(GREATER));
  __ ret(0);
}
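
// Illustrative C equivalent of the comparison above:
//
//   int n = min(left.length, right.length);
//   for (int i = 0; i < n; i++) {
//     if (left[i] != right[i]) return left[i] < right[i] ? LESS : GREATER;
//   }
//   if (left.length == right.length) return EQUAL;
//   return left.length < right.length ? LESS : GREATER;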


void StringCompareStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  // rsp[0]: return address
  // rsp[8]: right string
  // rsp[16]: left string

  __ movq(rdx, Operand(rsp, 2 * kPointerSize));  // left
  __ movq(rax, Operand(rsp, 1 * kPointerSize));  // right

  // Check for identity.
  NearLabel not_same;
  __ cmpq(rdx, rax);
  __ j(not_equal, &not_same);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ IncrementCounter(&Counters::string_compare_native, 1);
  __ ret(2 * kPointerSize);

  __ bind(&not_same);

  // Check that both are sequential ASCII strings.
  __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime);

  // Inline comparison of ASCII strings.
  __ IncrementCounter(&Counters::string_compare_native, 1);
  // Drop the arguments from the stack.
  __ pop(rcx);
  __ addq(rsp, Immediate(2 * kPointerSize));
  __ push(rcx);
  GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8);

  // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
}

#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64