// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "bootstrapper.h"
#include "code-stubs.h"
#include "regexp-macro-assembler.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void ToNumberStub::Generate(MacroAssembler* masm) {
  // The ToNumber stub takes one argument in rax.
  NearLabel check_heap_number, call_builtin;
  __ SmiTest(rax);
  __ j(not_zero, &check_heap_number);
  __ Ret();

  __ bind(&check_heap_number);
  __ Move(rbx, Factory::heap_number_map());
  __ cmpq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
  __ j(not_equal, &call_builtin);
  __ Ret();

  __ bind(&call_builtin);
  __ pop(rcx);  // Pop return address.
  __ push(rax);
  __ push(rcx);  // Push return address.
  __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION);
}

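// Stack layout on entry to FastNewClosureStub:
//  rsp[0]: return address.
//  rsp[8]: shared function info of the closure being created.
// The new JSFunction is returned in rax; if new-space allocation fails,
// the stub falls back to Runtime::kNewClosure.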
void FastNewClosureStub::Generate(MacroAssembler* masm) {
  // Create a new closure from the given function info in new
  // space. Set the context to the current context in rsi.
  Label gc;
  __ AllocateInNewSpace(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT);

  // Get the function info from the stack.
  __ movq(rdx, Operand(rsp, 1 * kPointerSize));

  // Compute the function map in the current global context and set that
  // as the map of the allocated object.
  __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset));
  __ movq(rcx, Operand(rcx, Context::SlotOffset(Context::FUNCTION_MAP_INDEX)));
  __ movq(FieldOperand(rax, JSObject::kMapOffset), rcx);

  // Initialize the rest of the function. We don't have to update the
  // write barrier because the allocated object is in new space.
  __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
  __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex);
  __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
  __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
  __ movq(FieldOperand(rax, JSObject::kElementsOffset), rbx);
  __ movq(FieldOperand(rax, JSFunction::kPrototypeOrInitialMapOffset), rcx);
  __ movq(FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset), rdx);
  __ movq(FieldOperand(rax, JSFunction::kContextOffset), rsi);
  __ movq(FieldOperand(rax, JSFunction::kLiteralsOffset), rbx);
  __ movq(FieldOperand(rax, JSFunction::kNextFunctionLinkOffset), rdi);

  // Initialize the code pointer in the function to be the one
  // found in the shared function info object.
  __ movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
  __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
  __ movq(FieldOperand(rax, JSFunction::kCodeEntryOffset), rdx);

  // Return and remove the on-stack parameter.
  __ ret(1 * kPointerSize);

  // Create a new closure through the slower runtime call.
  __ bind(&gc);
  __ pop(rcx);  // Temporarily remove return address.
  __ pop(rdx);
  __ push(rsi);
  __ push(rdx);
  __ Push(Factory::false_value());
  __ push(rcx);  // Restore return address.
  __ TailCallRuntime(Runtime::kNewClosure, 3, 1);
}


void FastNewContextStub::Generate(MacroAssembler* masm) {
  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
  __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize,
                        rax, rbx, rcx, &gc, TAG_OBJECT);

  // Get the function from the stack.
  __ movq(rcx, Operand(rsp, 1 * kPointerSize));

  // Setup the object header.
  __ LoadRoot(kScratchRegister, Heap::kContextMapRootIndex);
  __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
  __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));

  // Setup the fixed slots.
  __ Set(rbx, 0);  // Set to NULL.
  __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx);
  __ movq(Operand(rax, Context::SlotOffset(Context::FCONTEXT_INDEX)), rax);
  __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rbx);
  __ movq(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx);

  // Copy the global object from the surrounding context.
  __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_INDEX)), rbx);

  // Initialize the rest of the slots to undefined.
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
    __ movq(Operand(rax, Context::SlotOffset(i)), rbx);
  }

  // Return and remove the on-stack parameter.
  __ movq(rsi, rax);
  __ ret(1 * kPointerSize);

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kNewContext, 1, 1);
}

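// FastCloneShallowArrayStub makes a shallow clone of a boilerplate array,
// e.g. for an array literal such as
//
//   var a = [1, 2, 3];  // Each evaluation clones the [1, 2, 3] boilerplate.
//
// For copy-on-write element arrays the stub is created with length_ == 0,
// so (as far as the code below is concerned) only the elements pointer is
// copied and the COW backing store is shared with the boilerplate.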
void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [rsp + kPointerSize]: constant elements.
  // [rsp + (2 * kPointerSize)]: literal index.
  // [rsp + (3 * kPointerSize)]: literals array.

  // All sizes here are multiples of kPointerSize.
  int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
  int size = JSArray::kSize + elements_size;

  // Load boilerplate object into rcx and check if we need to create a
  // boilerplate.
  Label slow_case;
  __ movq(rcx, Operand(rsp, 3 * kPointerSize));
  __ movq(rax, Operand(rsp, 2 * kPointerSize));
  SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
  __ movq(rcx,
          FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize));
  __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
  __ j(equal, &slow_case);

  if (FLAG_debug_code) {
    const char* message;
    Heap::RootListIndex expected_map_index;
    if (mode_ == CLONE_ELEMENTS) {
      message = "Expected (writable) fixed array";
      expected_map_index = Heap::kFixedArrayMapRootIndex;
    } else {
      ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS);
      message = "Expected copy-on-write fixed array";
      expected_map_index = Heap::kFixedCOWArrayMapRootIndex;
    }
    __ push(rcx);
    __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
    __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
                   expected_map_index);
    __ Assert(equal, message);
    __ pop(rcx);
  }

  // Allocate both the JS array and the elements array in one big
  // allocation. This avoids multiple limit checks.
  __ AllocateInNewSpace(size, rax, rbx, rdx, &slow_case, TAG_OBJECT);

  // Copy the JS array part.
  for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
    if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
      __ movq(rbx, FieldOperand(rcx, i));
      __ movq(FieldOperand(rax, i), rbx);
    }
  }

  if (length_ > 0) {
    // Get hold of the elements array of the boilerplate and setup the
    // elements pointer in the resulting object.
    __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
    __ lea(rdx, Operand(rax, JSArray::kSize));
    __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx);

    // Copy the elements array.
    for (int i = 0; i < elements_size; i += kPointerSize) {
      __ movq(rbx, FieldOperand(rcx, i));
      __ movq(FieldOperand(rdx, i), rbx);
    }
  }

  // Return and remove the on-stack parameters.
  __ ret(3 * kPointerSize);

  __ bind(&slow_case);
  __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
}

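// ToBooleanStub computes the boolean value of the object on top of the
// stack and returns it as 1 or 0 in rax: false for null, undetectable
// objects, empty strings, and heap numbers that compare equal to zero
// (+0, -0, or NaN); every other heap object handled here yields true.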
void ToBooleanStub::Generate(MacroAssembler* masm) {
  NearLabel false_result, true_result, not_string;
  __ movq(rax, Operand(rsp, 1 * kPointerSize));

  // 'null' => false.
  __ CompareRoot(rax, Heap::kNullValueRootIndex);
  __ j(equal, &false_result);

  // Get the map and type of the heap object.
  // We don't use CmpObjectType because we manipulate the type field.
  __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
  __ movzxbq(rcx, FieldOperand(rdx, Map::kInstanceTypeOffset));

  // Undetectable => false.
  __ movzxbq(rbx, FieldOperand(rdx, Map::kBitFieldOffset));
  __ and_(rbx, Immediate(1 << Map::kIsUndetectable));
  __ j(not_zero, &false_result);

  // JavaScript object => true.
  __ cmpq(rcx, Immediate(FIRST_JS_OBJECT_TYPE));
  __ j(above_equal, &true_result);

  // String value => false iff empty.
  __ cmpq(rcx, Immediate(FIRST_NONSTRING_TYPE));
  __ j(above_equal, &not_string);
  __ movq(rdx, FieldOperand(rax, String::kLengthOffset));
  __ SmiTest(rdx);
  __ j(zero, &false_result);
  __ jmp(&true_result);

  __ bind(&not_string);
  __ CompareRoot(rdx, Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &true_result);
  // HeapNumber => false iff +0, -0, or NaN.
  // These three cases set the zero flag when compared to zero using ucomisd.
  __ xorpd(xmm0, xmm0);
  __ ucomisd(xmm0, FieldOperand(rax, HeapNumber::kValueOffset));
  __ j(zero, &false_result);
  // Fall through to |true_result|.

  // Return 1/0 for true/false in rax.
  __ bind(&true_result);
  __ movq(rax, Immediate(1));
  __ ret(1 * kPointerSize);
  __ bind(&false_result);
  __ Set(rax, 0);
  __ ret(1 * kPointerSize);
}


const char* GenericBinaryOpStub::GetName() {
  if (name_ != NULL) return name_;
  const int kMaxNameLength = 100;
  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
  if (name_ == NULL) return "OOM";
  const char* op_name = Token::Name(op_);
  const char* overwrite_name;
  switch (mode_) {
    case NO_OVERWRITE: overwrite_name = "Alloc"; break;
    case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
    case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
    default: overwrite_name = "UnknownOverwrite"; break;
  }

  OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
               "GenericBinaryOpStub_%s_%s%s_%s%s_%s_%s",
               op_name,
               overwrite_name,
               (flags_ & NO_SMI_CODE_IN_STUB) ? "_NoSmiInStub" : "",
               args_in_registers_ ? "RegArgs" : "StackArgs",
               args_reversed_ ? "_R" : "",
               static_operands_type_.ToString(),
               BinaryOpIC::GetName(runtime_operands_type_));
  return name_;
}

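// The three GenerateCall overloads below (register/register, register/smi
// and smi/register) all end in a call to this stub. When register passing
// is supported, the convention is: left operand in rdx, right operand in
// rax. For commutative operations the stub may instead record reversed
// arguments via SetArgsReversed() to save a move or an exchange.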
void GenericBinaryOpStub::GenerateCall(
    MacroAssembler* masm,
    Register left,
    Register right) {
  if (!ArgsInRegistersSupported()) {
    // Pass arguments on the stack.
    __ push(left);
    __ push(right);
  } else {
    // The calling convention with registers is left in rdx and right in rax.
    Register left_arg = rdx;
    Register right_arg = rax;
    if (!(left.is(left_arg) && right.is(right_arg))) {
      if (left.is(right_arg) && right.is(left_arg)) {
        if (IsOperationCommutative()) {
          SetArgsReversed();
        } else {
          __ xchg(left, right);
        }
      } else if (left.is(left_arg)) {
        __ movq(right_arg, right);
      } else if (right.is(right_arg)) {
        __ movq(left_arg, left);
      } else if (left.is(right_arg)) {
        if (IsOperationCommutative()) {
          __ movq(left_arg, right);
          SetArgsReversed();
        } else {
          // The order of moves is important to avoid destroying the left
          // argument.
          __ movq(left_arg, left);
          __ movq(right_arg, right);
        }
      } else if (right.is(left_arg)) {
        if (IsOperationCommutative()) {
          __ movq(right_arg, left);
          SetArgsReversed();
        } else {
          // The order of moves is important to avoid destroying the right
          // argument.
          __ movq(right_arg, right);
          __ movq(left_arg, left);
        }
      } else {
        // The order of moves is not important.
        __ movq(left_arg, left);
        __ movq(right_arg, right);
      }
    }

    // Update flags to indicate that arguments are in registers.
    SetArgsInRegisters();
    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
  }

  // Call the stub.
  __ CallStub(this);
}


void GenericBinaryOpStub::GenerateCall(
    MacroAssembler* masm,
    Register left,
    Smi* right) {
  if (!ArgsInRegistersSupported()) {
    // Pass arguments on the stack.
    __ push(left);
    __ Push(right);
  } else {
    // The calling convention with registers is left in rdx and right in rax.
    Register left_arg = rdx;
    Register right_arg = rax;
    if (left.is(left_arg)) {
      __ Move(right_arg, right);
    } else if (left.is(right_arg) && IsOperationCommutative()) {
      __ Move(left_arg, right);
      SetArgsReversed();
    } else {
      // For non-commutative operations, left and right_arg might be
      // the same register. Therefore, the order of the moves is
      // important here in order to not overwrite left before moving
      // it to left_arg.
      __ movq(left_arg, left);
      __ Move(right_arg, right);
    }

    // Update flags to indicate that arguments are in registers.
    SetArgsInRegisters();
    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
  }

  // Call the stub.
  __ CallStub(this);
}


void GenericBinaryOpStub::GenerateCall(
    MacroAssembler* masm,
    Smi* left,
    Register right) {
  if (!ArgsInRegistersSupported()) {
    // Pass arguments on the stack.
    __ Push(left);
    __ push(right);
  } else {
    // The calling convention with registers is left in rdx and right in rax.
    Register left_arg = rdx;
    Register right_arg = rax;
    if (right.is(right_arg)) {
      __ Move(left_arg, left);
    } else if (right.is(left_arg) && IsOperationCommutative()) {
      __ Move(right_arg, left);
      SetArgsReversed();
    } else {
      // For non-commutative operations, right and left_arg might be
      // the same register. Therefore, the order of the moves is
      // important here in order to not overwrite right before moving
      // it to right_arg.
      __ movq(right_arg, right);
      __ Move(left_arg, left);
    }
    // Update flags to indicate that arguments are in registers.
    SetArgsInRegisters();
    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
  }

  // Call the stub.
  __ CallStub(this);
}

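// Typical use of FloatingPointHelper in this file (a sketch only; the
// result register and bailout label are the caller's choice):
//
//   FloatingPointHelper::LoadSSE2UnknownOperands(masm, &not_numbers);
//   __ addsd(xmm0, xmm1);  // xmm0 = left + right, as doubles.
//   __ movsd(FieldOperand(result, HeapNumber::kValueOffset), xmm0);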
class FloatingPointHelper : public AllStatic {
 public:
  // Load the operands from rdx and rax into xmm0 and xmm1, as doubles.
  // If the operands are not both numbers, jump to not_numbers.
  // Leaves rdx and rax unchanged. SmiOperands assumes both are smis.
  // NumberOperands assumes both are smis or heap numbers.
  static void LoadSSE2SmiOperands(MacroAssembler* masm);
  static void LoadSSE2NumberOperands(MacroAssembler* masm);
  static void LoadSSE2UnknownOperands(MacroAssembler* masm,
                                      Label* not_numbers);

  // Takes the operands in rdx and rax and loads them as integers in rax
  // and rcx.
  static void LoadAsIntegers(MacroAssembler* masm,
                             Label* operand_conversion_failure,
                             Register heap_number_map);
  // As above, but we know the operands to be numbers. In that case,
  // conversion can't fail.
  static void LoadNumbersAsIntegers(MacroAssembler* masm);
};

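// Note on the smi representation assumed throughout this file: on x64 a
// smi keeps its 32-bit payload in the upper half of the word and zeros in
// the lower half (kSmiValueSize == 32, see the STATIC_ASSERT below). The
// SmiAdd/SmiSub/SmiMul/... macros operate on that representation directly
// and branch to their bailout label if the result leaves the smi range.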
void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) {
  // 1. Move arguments into rdx, rax except for DIV and MOD, which need the
  // dividend in rax and rdx free for the division. Use rax, rbx for those.
  Comment load_comment(masm, "-- Load arguments");
  Register left = rdx;
  Register right = rax;
  if (op_ == Token::DIV || op_ == Token::MOD) {
    left = rax;
    right = rbx;
    if (HasArgsInRegisters()) {
      __ movq(rbx, rax);
      __ movq(rax, rdx);
    }
  }
  if (!HasArgsInRegisters()) {
    __ movq(right, Operand(rsp, 1 * kPointerSize));
    __ movq(left, Operand(rsp, 2 * kPointerSize));
  }

  Label not_smis;
  // 2. Smi check both operands.
  if (static_operands_type_.IsSmi()) {
    // Skip smi check if we know that both arguments are smis.
    if (FLAG_debug_code) {
      __ AbortIfNotSmi(left);
      __ AbortIfNotSmi(right);
    }
    if (op_ == Token::BIT_OR) {
      // Handle OR here, since we do extra smi-checking in the or code below.
      __ SmiOr(right, right, left);
      GenerateReturn(masm);
      return;
    }
  } else {
    if (op_ != Token::BIT_OR) {
      // Skip the check for OR as it is better combined with the
      // actual operation.
      Comment smi_check_comment(masm, "-- Smi check arguments");
      __ JumpIfNotBothSmi(left, right, &not_smis);
    }
  }

  // 3. Operands are both smis (except for OR), perform the operation leaving
  // the result in rax and check the result if necessary.
  Comment perform_smi(masm, "-- Perform smi operation");
  Label use_fp_on_smis;
  switch (op_) {
    case Token::ADD: {
      ASSERT(right.is(rax));
      __ SmiAdd(right, right, left, &use_fp_on_smis);  // ADD is commutative.
      break;
    }

    case Token::SUB: {
      __ SmiSub(left, left, right, &use_fp_on_smis);
      __ movq(rax, left);
      break;
    }

    case Token::MUL:
      ASSERT(right.is(rax));
      __ SmiMul(right, right, left, &use_fp_on_smis);  // MUL is commutative.
      break;

    case Token::DIV:
      ASSERT(left.is(rax));
      __ SmiDiv(left, left, right, &use_fp_on_smis);
      break;

    case Token::MOD:
      ASSERT(left.is(rax));
      __ SmiMod(left, left, right, slow);
      break;

    case Token::BIT_OR:
      ASSERT(right.is(rax));
      __ movq(rcx, right);  // Save the right operand.
      __ SmiOr(right, right, left);  // BIT_OR is commutative.
      __ testb(right, Immediate(kSmiTagMask));
      __ j(not_zero, &not_smis);
      break;

    case Token::BIT_AND:
      ASSERT(right.is(rax));
      __ SmiAnd(right, right, left);  // BIT_AND is commutative.
      break;

    case Token::BIT_XOR:
      ASSERT(right.is(rax));
      __ SmiXor(right, right, left);  // BIT_XOR is commutative.
      break;

    case Token::SHL:
    case Token::SHR:
    case Token::SAR:
      switch (op_) {
        case Token::SAR:
          __ SmiShiftArithmeticRight(left, left, right);
          break;
        case Token::SHR:
          __ SmiShiftLogicalRight(left, left, right, slow);
          break;
        case Token::SHL:
          __ SmiShiftLeft(left, left, right);
          break;
        default:
          UNREACHABLE();
      }
      __ movq(rax, left);
      break;

    default:
      UNREACHABLE();
      break;
  }

  // 4. Emit return of result in rax.
  GenerateReturn(masm);

  // 5. For some operations emit inline code to perform floating point
  // operations on known smis (e.g., if the result of the operation
  // overflowed the smi range).
  switch (op_) {
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV: {
      ASSERT(use_fp_on_smis.is_linked());
      __ bind(&use_fp_on_smis);
      if (op_ == Token::DIV) {
        __ movq(rdx, rax);
        __ movq(rax, rbx);
      }
      // left is rdx, right is rax.
      __ AllocateHeapNumber(rbx, rcx, slow);
      FloatingPointHelper::LoadSSE2SmiOperands(masm);
      switch (op_) {
        case Token::ADD: __ addsd(xmm0, xmm1); break;
        case Token::SUB: __ subsd(xmm0, xmm1); break;
        case Token::MUL: __ mulsd(xmm0, xmm1); break;
        case Token::DIV: __ divsd(xmm0, xmm1); break;
        default: UNREACHABLE();
      }
      __ movsd(FieldOperand(rbx, HeapNumber::kValueOffset), xmm0);
      __ movq(rax, rbx);
      GenerateReturn(masm);
    }
    default:
      break;
  }

  // 6. Non-smi operands, fall out to the non-smi code with the operands in
  // rdx and rax.
  Comment done_comment(masm, "-- Enter non-smi code");
  __ bind(&not_smis);

  switch (op_) {
    case Token::DIV:
    case Token::MOD:
      // Operands are in rax, rbx at this point.
      __ movq(rdx, rax);
      __ movq(rax, rbx);
      break;

    case Token::BIT_OR:
      // Right operand is saved in rcx and rax was destroyed by the smi
      // operation.
      __ movq(rax, rcx);
      break;

    default:
      break;
  }
}

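// Overall flow of GenericBinaryOpStub::Generate:
//  1. Optional smi fast path (GenerateSmiCode).
//  2. SSE2 floating-point path for ADD/SUB/MUL/DIV; integer conversion
//     for the bitwise and shift operations.
//  3. Type transition (patching through the BinaryOp IC) when a new
//     operand type is seen for the first time.
//  4. Runtime/builtin fallback, including string concatenation for ADD.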
void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
  Label call_runtime;

  if (ShouldGenerateSmiCode()) {
    GenerateSmiCode(masm, &call_runtime);
  } else if (op_ != Token::MOD) {
    if (!HasArgsInRegisters()) {
      GenerateLoadArguments(masm);
    }
  }
  // Floating point case.
  if (ShouldGenerateFPCode()) {
    switch (op_) {
      case Token::ADD:
      case Token::SUB:
      case Token::MUL:
      case Token::DIV: {
        if (runtime_operands_type_ == BinaryOpIC::DEFAULT &&
            HasSmiCodeInStub()) {
          // Execution reaches this point when the first non-smi argument
          // occurs (and only if smi code is generated). This is the right
          // moment to patch to HEAP_NUMBERS state. The transition is
          // attempted only for the four basic operations. The stub stays
          // in the DEFAULT state forever for all other operations (also
          // if smi code is skipped).
          GenerateTypeTransition(masm);
          break;
        }

        Label not_floats;
        // rax: y
        // rdx: x
        if (static_operands_type_.IsNumber()) {
          if (FLAG_debug_code) {
            // Assert at runtime that inputs are only numbers.
            __ AbortIfNotNumber(rdx);
            __ AbortIfNotNumber(rax);
          }
          FloatingPointHelper::LoadSSE2NumberOperands(masm);
        } else {
          FloatingPointHelper::LoadSSE2UnknownOperands(masm, &call_runtime);
        }

        switch (op_) {
          case Token::ADD: __ addsd(xmm0, xmm1); break;
          case Token::SUB: __ subsd(xmm0, xmm1); break;
          case Token::MUL: __ mulsd(xmm0, xmm1); break;
          case Token::DIV: __ divsd(xmm0, xmm1); break;
          default: UNREACHABLE();
        }
        // Allocate a heap number, if needed.
        Label skip_allocation;
        OverwriteMode mode = mode_;
        if (HasArgsReversed()) {
          if (mode == OVERWRITE_RIGHT) {
            mode = OVERWRITE_LEFT;
          } else if (mode == OVERWRITE_LEFT) {
            mode = OVERWRITE_RIGHT;
          }
        }
        switch (mode) {
          case OVERWRITE_LEFT:
            __ JumpIfNotSmi(rdx, &skip_allocation);
            __ AllocateHeapNumber(rbx, rcx, &call_runtime);
            __ movq(rdx, rbx);
            __ bind(&skip_allocation);
            __ movq(rax, rdx);
            break;
          case OVERWRITE_RIGHT:
            // If the argument in rax is already an object, we skip the
            // allocation of a heap number.
            __ JumpIfNotSmi(rax, &skip_allocation);
            // Fall through!
          case NO_OVERWRITE:
            // Allocate a heap number for the result. Keep rax and rdx intact
            // for the possible runtime call.
            __ AllocateHeapNumber(rbx, rcx, &call_runtime);
            __ movq(rax, rbx);
            __ bind(&skip_allocation);
            break;
          default: UNREACHABLE();
        }
        __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0);
        GenerateReturn(masm);
        __ bind(&not_floats);
        if (runtime_operands_type_ == BinaryOpIC::DEFAULT &&
            !HasSmiCodeInStub()) {
          // Execution reaches this point when the first non-number argument
          // occurs (and only if smi code is skipped from the stub, otherwise
          // the patching has already been done earlier in this case branch).
          // A perfect moment to try patching to STRINGS for ADD operation.
          if (op_ == Token::ADD) {
            GenerateTypeTransition(masm);
          }
        }
        break;
      }
      case Token::MOD: {
        // For MOD we go directly to runtime in the non-smi case.
        break;
      }
      case Token::BIT_OR:
      case Token::BIT_AND:
      case Token::BIT_XOR:
      case Token::SAR:
      case Token::SHL:
      case Token::SHR: {
        Label skip_allocation, non_smi_shr_result;
        Register heap_number_map = r9;
        __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
        if (static_operands_type_.IsNumber()) {
          if (FLAG_debug_code) {
            // Assert at runtime that inputs are only numbers.
            __ AbortIfNotNumber(rdx);
            __ AbortIfNotNumber(rax);
          }
          FloatingPointHelper::LoadNumbersAsIntegers(masm);
        } else {
          FloatingPointHelper::LoadAsIntegers(masm,
                                              &call_runtime,
                                              heap_number_map);
        }
        switch (op_) {
          case Token::BIT_OR: __ orl(rax, rcx); break;
          case Token::BIT_AND: __ andl(rax, rcx); break;
          case Token::BIT_XOR: __ xorl(rax, rcx); break;
          case Token::SAR: __ sarl_cl(rax); break;
          case Token::SHL: __ shll_cl(rax); break;
          case Token::SHR: {
            __ shrl_cl(rax);
            // Check if result is negative. This can only happen for a shift
            // by zero.
            __ testl(rax, rax);
            __ j(negative, &non_smi_shr_result);
            break;
          }
          default: UNREACHABLE();
        }

        STATIC_ASSERT(kSmiValueSize == 32);
        // Tag smi result and return.
        __ Integer32ToSmi(rax, rax);
        GenerateReturn(masm);

        // All bit-ops except SHR return a signed int32 that can be
        // returned immediately as a smi.
        // We might need to allocate a HeapNumber if we shift a negative
        // number right by zero (i.e., convert to UInt32).
        if (op_ == Token::SHR) {
          ASSERT(non_smi_shr_result.is_linked());
          __ bind(&non_smi_shr_result);
          // Allocate a heap number if needed.
          __ movl(rbx, rax);  // rbx holds result value (uint32 value as int64).
          switch (mode_) {
            case OVERWRITE_LEFT:
            case OVERWRITE_RIGHT:
              // If the operand was an object, we skip the
              // allocation of a heap number.
              __ movq(rax, Operand(rsp, mode_ == OVERWRITE_RIGHT ?
                                   1 * kPointerSize : 2 * kPointerSize));
              __ JumpIfNotSmi(rax, &skip_allocation);
              // Fall through!
            case NO_OVERWRITE:
              // Allocate heap number in new space.
              // Not using AllocateHeapNumber macro in order to reuse
              // already loaded heap_number_map.
              __ AllocateInNewSpace(HeapNumber::kSize,
                                    rax,
                                    rcx,
                                    no_reg,
                                    &call_runtime,
                                    TAG_OBJECT);
              // Set the map.
              if (FLAG_debug_code) {
                __ AbortIfNotRootValue(heap_number_map,
                                       Heap::kHeapNumberMapRootIndex,
                                       "HeapNumberMap register clobbered.");
              }
              __ movq(FieldOperand(rax, HeapObject::kMapOffset),
                      heap_number_map);
              __ bind(&skip_allocation);
              break;
            default: UNREACHABLE();
          }
          // Store the result in the HeapNumber and return.
          __ cvtqsi2sd(xmm0, rbx);
          __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0);
          GenerateReturn(masm);
        }

        break;
      }
      default: UNREACHABLE(); break;
    }
  }

  // If all else fails, use the runtime system to get the correct
  // result. If arguments were passed in registers, place them on the
  // stack in the correct order below the return address.
  __ bind(&call_runtime);

  if (HasArgsInRegisters()) {
    GenerateRegisterArgsPush(masm);
  }

  switch (op_) {
    case Token::ADD: {
      // Registers containing left and right operands respectively.
      Register lhs, rhs;

      if (HasArgsReversed()) {
        lhs = rax;
        rhs = rdx;
      } else {
        lhs = rdx;
        rhs = rax;
      }

      // Test for string arguments before calling runtime.
      Label not_strings, both_strings, not_string1, string1, string1_smi2;

      // If this stub has already generated FP-specific code then the
      // arguments are already in rdx and rax.
      if (!ShouldGenerateFPCode() && !HasArgsInRegisters()) {
        GenerateLoadArguments(masm);
      }

      Condition is_smi;
      is_smi = masm->CheckSmi(lhs);
      __ j(is_smi, &not_string1);
      __ CmpObjectType(lhs, FIRST_NONSTRING_TYPE, r8);
      __ j(above_equal, &not_string1);

      // First argument is a string, test second.
      is_smi = masm->CheckSmi(rhs);
      __ j(is_smi, &string1_smi2);
      __ CmpObjectType(rhs, FIRST_NONSTRING_TYPE, r9);
      __ j(above_equal, &string1);

      // First and second argument are strings.
      StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
      __ TailCallStub(&string_add_stub);

      __ bind(&string1_smi2);
      // First argument is a string, second is a smi. Try to look up the
      // number string for the smi in the number string cache.
      NumberToStringStub::GenerateLookupNumberStringCache(
          masm, rhs, rbx, rcx, r8, true, &string1);

      // Replace second argument on stack and tail-call the string add stub
      // to make the result.
      __ movq(Operand(rsp, 1 * kPointerSize), rbx);
      __ TailCallStub(&string_add_stub);

      // Only first argument is a string.
      __ bind(&string1);
      __ InvokeBuiltin(Builtins::STRING_ADD_LEFT, JUMP_FUNCTION);

      // First argument was not a string, test second.
      __ bind(&not_string1);
      is_smi = masm->CheckSmi(rhs);
      __ j(is_smi, &not_strings);
      __ CmpObjectType(rhs, FIRST_NONSTRING_TYPE, rhs);
      __ j(above_equal, &not_strings);

      // Only second argument is a string.
      __ InvokeBuiltin(Builtins::STRING_ADD_RIGHT, JUMP_FUNCTION);

      __ bind(&not_strings);
      // Neither argument is a string.
      __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
      break;
    }
    case Token::SUB:
      __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
      break;
    case Token::MUL:
      __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
      break;
    case Token::DIV:
      __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
      break;
    case Token::MOD:
      __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
      break;
    case Token::BIT_OR:
      __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
      break;
    case Token::BIT_AND:
      __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
      break;
    case Token::BIT_XOR:
      __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
      break;
    case Token::SAR:
      __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
      break;
    case Token::SHL:
      __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
      break;
    case Token::SHR:
      __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}

void GenericBinaryOpStub::GenerateLoadArguments(MacroAssembler* masm) {
  ASSERT(!HasArgsInRegisters());
  __ movq(rax, Operand(rsp, 1 * kPointerSize));
  __ movq(rdx, Operand(rsp, 2 * kPointerSize));
}


void GenericBinaryOpStub::GenerateReturn(MacroAssembler* masm) {
  // If arguments are not passed in registers remove them from the stack before
  // returning.
  if (!HasArgsInRegisters()) {
    __ ret(2 * kPointerSize);  // Remove both operands.
  } else {
    __ ret(0);
  }
}


void GenericBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
  ASSERT(HasArgsInRegisters());
  __ pop(rcx);
  if (HasArgsReversed()) {
    __ push(rax);
    __ push(rdx);
  } else {
    __ push(rdx);
    __ push(rax);
  }
  __ push(rcx);
}

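// GenerateTypeTransition re-enters the runtime to patch the call site to
// a more specialized stub. The five arguments passed to
// IC::kBinaryOp_Patch are the two operands (already on the stack) plus
// the stub's minor key, the operation, and the current operand type info,
// each pushed as a smi.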
void GenericBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  Label get_result;

  // Ensure the operands are on the stack.
  if (HasArgsInRegisters()) {
    GenerateRegisterArgsPush(masm);
  }

  // Left and right arguments are already on stack.
  __ pop(rcx);  // Save the return address.

  // Push this stub's key.
  __ Push(Smi::FromInt(MinorKey()));

  // Although the operation and the type info are encoded into the key,
  // the encoding is opaque, so push them too.
  __ Push(Smi::FromInt(op_));

  __ Push(Smi::FromInt(runtime_operands_type_));

  __ push(rcx);  // The return address.

  // Perform patching to an appropriate fast case and return the result.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kBinaryOp_Patch)),
      5,
      1);
}


Handle<Code> GetBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info) {
  GenericBinaryOpStub stub(key, type_info);
  return stub.GetCode();
}


Handle<Code> GetTypeRecordingBinaryOpStub(int key,
    TRBinaryOpIC::TypeInfo type_info,
    TRBinaryOpIC::TypeInfo result_type_info) {
  TypeRecordingBinaryOpStub stub(key, type_info, result_type_info);
  return stub.GetCode();
}


void TypeRecordingBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ pop(rcx);  // Save return address.
  __ push(rdx);
  __ push(rax);
  // Left and right arguments are now on top.
  // Push this stub's key. Although the operation and the type info are
  // encoded into the key, the encoding is opaque, so push them too.
  __ Push(Smi::FromInt(MinorKey()));
  __ Push(Smi::FromInt(op_));
  __ Push(Smi::FromInt(operands_type_));

  __ push(rcx);  // Push return address.

  // Patch the caller to an appropriate specialized stub and return the
  // operation result to the caller of the stub.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch)),
      5,
      1);
}

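// A TypeRecordingBinaryOpStub starts out UNINITIALIZED and is patched to
// progressively more general states as operand types are observed:
// SMI -> HEAP_NUMBER -> GENERIC, with STRING covering ADD on strings.
// The INT32 state is unused on x64 because any int32 result fits in the
// 32-bit smi payload, so reaching it here is an error.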
void TypeRecordingBinaryOpStub::Generate(MacroAssembler* masm) {
  switch (operands_type_) {
    case TRBinaryOpIC::UNINITIALIZED:
      GenerateTypeTransition(masm);
      break;
    case TRBinaryOpIC::SMI:
      GenerateSmiStub(masm);
      break;
    case TRBinaryOpIC::INT32:
      UNREACHABLE();
      // The int32 case is identical to the Smi case. We avoid creating this
      // ic state on x64.
      break;
    case TRBinaryOpIC::HEAP_NUMBER:
      GenerateHeapNumberStub(masm);
      break;
    case TRBinaryOpIC::STRING:
      GenerateStringStub(masm);
      break;
    case TRBinaryOpIC::GENERIC:
      GenerateGeneric(masm);
      break;
    default:
      UNREACHABLE();
  }
}


const char* TypeRecordingBinaryOpStub::GetName() {
  if (name_ != NULL) return name_;
  const int kMaxNameLength = 100;
  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
  if (name_ == NULL) return "OOM";
  const char* op_name = Token::Name(op_);
  const char* overwrite_name;
  switch (mode_) {
    case NO_OVERWRITE: overwrite_name = "Alloc"; break;
    case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
    case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
    default: overwrite_name = "UnknownOverwrite"; break;
  }

  OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
               "TypeRecordingBinaryOpStub_%s_%s_%s",
               op_name,
               overwrite_name,
               TRBinaryOpIC::GetName(operands_type_));
  return name_;
}

void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm,
    Label* slow,
    SmiCodeGenerateHeapNumberResults allow_heapnumber_results) {
  // We only generate heapnumber answers for overflowing calculations
  // for the four basic arithmetic operations.
  bool generate_inline_heapnumber_results =
      (allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS) &&
      (op_ == Token::ADD || op_ == Token::SUB ||
       op_ == Token::MUL || op_ == Token::DIV);

  // Arguments to TypeRecordingBinaryOpStub are in rdx and rax.
  Register left = rdx;
  Register right = rax;

  // Smi check of both operands. If op is BIT_OR, the check is delayed
  // until after the OR operation.
  Label not_smis;
  Label use_fp_on_smis;
  Label restore_MOD_registers;  // Only used if op_ == Token::MOD.

  if (op_ != Token::BIT_OR) {
    Comment smi_check_comment(masm, "-- Smi check arguments");
    __ JumpIfNotBothSmi(left, right, &not_smis);
  }

  // Perform the operation.
  Comment perform_smi(masm, "-- Perform smi operation");
  switch (op_) {
    case Token::ADD:
      ASSERT(right.is(rax));
      __ SmiAdd(right, right, left, &use_fp_on_smis);  // ADD is commutative.
      break;

    case Token::SUB:
      __ SmiSub(left, left, right, &use_fp_on_smis);
      __ movq(rax, left);
      break;

    case Token::MUL:
      ASSERT(right.is(rax));
      __ SmiMul(right, right, left, &use_fp_on_smis);  // MUL is commutative.
      break;

    case Token::DIV:
      // SmiDiv will not accept left in rdx or right in rax.
      left = rcx;
      right = rbx;
      __ movq(rbx, rax);
      __ movq(rcx, rdx);
      __ SmiDiv(rax, left, right, &use_fp_on_smis);
      break;

    case Token::MOD:
      // SmiMod will not accept left in rdx or right in rax.
      left = rcx;
      right = rbx;
      __ movq(rbx, rax);
      __ movq(rcx, rdx);
      __ SmiMod(rax, left, right, &use_fp_on_smis);
      break;

    case Token::BIT_OR: {
      ASSERT(right.is(rax));
      __ movq(rcx, right);  // Save the right operand.
      __ SmiOr(right, right, left);  // BIT_OR is commutative.
      __ JumpIfNotSmi(right, &not_smis);  // Test delayed until after BIT_OR.
      break;
    }
    case Token::BIT_XOR:
      ASSERT(right.is(rax));
      __ SmiXor(right, right, left);  // BIT_XOR is commutative.
      break;

    case Token::BIT_AND:
      ASSERT(right.is(rax));
      __ SmiAnd(right, right, left);  // BIT_AND is commutative.
      break;

    case Token::SHL:
      __ SmiShiftLeft(left, left, right);
      __ movq(rax, left);
      break;

    case Token::SAR:
      __ SmiShiftArithmeticRight(left, left, right);
      __ movq(rax, left);
      break;

    case Token::SHR:
      __ SmiShiftLogicalRight(left, left, right, &not_smis);
      __ movq(rax, left);
      break;

    default:
      UNREACHABLE();
  }

  // 5. Emit return of result in rax. Some operations have registers pushed.
  __ ret(0);

  // 6. For some operations emit inline code to perform floating point
  // operations on known smis (e.g., if the result of the operation
  // overflowed the smi range).
  __ bind(&use_fp_on_smis);
  if (op_ == Token::DIV || op_ == Token::MOD) {
    // Restore left and right to rdx and rax.
    __ movq(rdx, rcx);
    __ movq(rax, rbx);
  }

  if (generate_inline_heapnumber_results) {
    __ AllocateHeapNumber(rcx, rbx, slow);
    Comment perform_float(masm, "-- Perform float operation on smis");
    FloatingPointHelper::LoadSSE2SmiOperands(masm);
    switch (op_) {
      case Token::ADD: __ addsd(xmm0, xmm1); break;
      case Token::SUB: __ subsd(xmm0, xmm1); break;
      case Token::MUL: __ mulsd(xmm0, xmm1); break;
      case Token::DIV: __ divsd(xmm0, xmm1); break;
      default: UNREACHABLE();
    }
    __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0);
    __ movq(rax, rcx);
    __ ret(0);
  }

  // 7. Non-smi operands reach the end of the code generated by
  // GenerateSmiCode, and fall through to subsequent code,
  // with the operands in rdx and rax.
  Comment done_comment(masm, "-- Enter non-smi code");
  __ bind(&not_smis);
  if (op_ == Token::BIT_OR) {
    __ movq(right, rcx);
  }
}

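// GenerateFloatingPointCode handles the non-smi cases. The one subtle
// case is Token::SHR: shifting a negative int32 right by zero yields a
// uint32 above kMaxInt that cannot be represented as a smi payload, so a
// heap number has to be allocated for it (see non_smi_shr_result below).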
void TypeRecordingBinaryOpStub::GenerateFloatingPointCode(
    MacroAssembler* masm,
    Label* allocation_failure,
    Label* non_numeric_failure) {
  switch (op_) {
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV: {
      FloatingPointHelper::LoadSSE2UnknownOperands(masm, non_numeric_failure);

      switch (op_) {
        case Token::ADD: __ addsd(xmm0, xmm1); break;
        case Token::SUB: __ subsd(xmm0, xmm1); break;
        case Token::MUL: __ mulsd(xmm0, xmm1); break;
        case Token::DIV: __ divsd(xmm0, xmm1); break;
        default: UNREACHABLE();
      }
      GenerateHeapResultAllocation(masm, allocation_failure);
      __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0);
      __ ret(0);
      break;
    }
    case Token::MOD: {
      // For MOD we jump to the allocation_failure label, to call runtime.
      __ jmp(allocation_failure);
      break;
    }
    case Token::BIT_OR:
    case Token::BIT_AND:
    case Token::BIT_XOR:
    case Token::SAR:
    case Token::SHL:
    case Token::SHR: {
      Label non_smi_shr_result;
      Register heap_number_map = r9;
      __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
      FloatingPointHelper::LoadAsIntegers(masm, non_numeric_failure,
                                          heap_number_map);
      switch (op_) {
        case Token::BIT_OR: __ orl(rax, rcx); break;
        case Token::BIT_AND: __ andl(rax, rcx); break;
        case Token::BIT_XOR: __ xorl(rax, rcx); break;
        case Token::SAR: __ sarl_cl(rax); break;
        case Token::SHL: __ shll_cl(rax); break;
        case Token::SHR: {
          __ shrl_cl(rax);
          // Check if result is negative. This can only happen for a shift
          // by zero.
          __ testl(rax, rax);
          __ j(negative, &non_smi_shr_result);
          break;
        }
        default: UNREACHABLE();
      }
      STATIC_ASSERT(kSmiValueSize == 32);
      // Tag smi result and return.
      __ Integer32ToSmi(rax, rax);
      __ Ret();

      // Logical shift right can produce an unsigned int32 that is not
      // an int32, and so is not in the smi range. Allocate a heap number
      // in that case.
      if (op_ == Token::SHR) {
        __ bind(&non_smi_shr_result);
        Label allocation_failed;
        __ movl(rbx, rax);  // rbx holds result value (uint32 value as int64).
        // Allocate heap number in new space.
        // Not using AllocateHeapNumber macro in order to reuse
        // already loaded heap_number_map.
        __ AllocateInNewSpace(HeapNumber::kSize,
                              rax,
                              rcx,
                              no_reg,
                              &allocation_failed,
                              TAG_OBJECT);
        // Set the map.
        if (FLAG_debug_code) {
          __ AbortIfNotRootValue(heap_number_map,
                                 Heap::kHeapNumberMapRootIndex,
                                 "HeapNumberMap register clobbered.");
        }
        __ movq(FieldOperand(rax, HeapObject::kMapOffset),
                heap_number_map);
        __ cvtqsi2sd(xmm0, rbx);
        __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0);
        __ Ret();

        __ bind(&allocation_failed);
        // We need tagged values in rdx and rax for the following code,
        // not int32 in rax and rcx.
        __ Integer32ToSmi(rax, rcx);
        __ Integer32ToSmi(rdx, rax);
        __ jmp(allocation_failure);
      }
      break;
    }
    default: UNREACHABLE(); break;
  }
  // No fall-through from this generated code.
  if (FLAG_debug_code) {
    __ Abort("Unexpected fall-through in "
             "TypeRecordingBinaryStub::GenerateFloatingPointCode.");
  }
}

void TypeRecordingBinaryOpStub::GenerateStringAddCode(MacroAssembler* masm) {
  GenerateRegisterArgsPush(masm);
  // Registers containing left and right operands respectively.
  Register lhs = rdx;
  Register rhs = rax;

  // Test for string arguments before calling runtime.
  Label not_strings, both_strings, not_string1, string1, string1_smi2;

  __ JumpIfNotString(lhs, r8, &not_string1);

  // First argument is a string, test second.
  __ JumpIfSmi(rhs, &string1_smi2);
  __ CmpObjectType(rhs, FIRST_NONSTRING_TYPE, r9);
  __ j(above_equal, &string1);

  // First and second argument are strings.
  StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
  __ TailCallStub(&string_add_stub);

  __ bind(&string1_smi2);
  // First argument is a string, second is a smi. Try to look up the number
  // string for the smi in the number string cache.
  NumberToStringStub::GenerateLookupNumberStringCache(
      masm, rhs, rbx, rcx, r8, true, &string1);

  // Replace second argument on stack and tail-call the string add stub to
  // make the result.
  __ movq(Operand(rsp, 1 * kPointerSize), rbx);
  __ TailCallStub(&string_add_stub);

  // Only first argument is a string.
  __ bind(&string1);
  __ InvokeBuiltin(Builtins::STRING_ADD_LEFT, JUMP_FUNCTION);

  // First argument was not a string, test second.
  __ bind(&not_string1);
  __ JumpIfNotString(rhs, rhs, &not_strings);

  // Only second argument is a string.
  __ InvokeBuiltin(Builtins::STRING_ADD_RIGHT, JUMP_FUNCTION);

  __ bind(&not_strings);
  // Neither argument is a string.
  // Pop arguments, because CallRuntimeCode wants to push them again.
  __ pop(rcx);
  __ pop(rax);
  __ pop(rdx);
  __ push(rcx);
}


void TypeRecordingBinaryOpStub::GenerateCallRuntimeCode(MacroAssembler* masm) {
  GenerateRegisterArgsPush(masm);
  switch (op_) {
    case Token::ADD:
      __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
      break;
    case Token::SUB:
      __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
      break;
    case Token::MUL:
      __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
      break;
    case Token::DIV:
      __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
      break;
    case Token::MOD:
      __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
      break;
    case Token::BIT_OR:
      __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
      break;
    case Token::BIT_AND:
      __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
      break;
    case Token::BIT_XOR:
      __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
      break;
    case Token::SAR:
      __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
      break;
    case Token::SHL:
      __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
      break;
    case Token::SHR:
      __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}

void TypeRecordingBinaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
  Label not_smi;

  GenerateSmiCode(masm, &not_smi, NO_HEAPNUMBER_RESULTS);

  __ bind(&not_smi);
  GenerateTypeTransition(masm);
}


void TypeRecordingBinaryOpStub::GenerateStringStub(MacroAssembler* masm) {
  ASSERT(op_ == Token::ADD);
  GenerateStringAddCode(masm);

  GenerateTypeTransition(masm);
}


void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
  Label gc_required, not_number;
  GenerateFloatingPointCode(masm, &gc_required, &not_number);

  __ bind(&not_number);
  GenerateTypeTransition(masm);

  __ bind(&gc_required);
  GenerateCallRuntimeCode(masm);
}


void TypeRecordingBinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
  Label call_runtime, call_string_add_or_runtime;

  GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);

  GenerateFloatingPointCode(masm, &call_runtime, &call_string_add_or_runtime);

  __ bind(&call_string_add_or_runtime);
  if (op_ == Token::ADD) {
    GenerateStringAddCode(masm);
  }

  __ bind(&call_runtime);
  GenerateCallRuntimeCode(masm);
}


void TypeRecordingBinaryOpStub::GenerateHeapResultAllocation(
    MacroAssembler* masm,
    Label* alloc_failure) {
  Label skip_allocation;
  OverwriteMode mode = mode_;
  switch (mode) {
    case OVERWRITE_LEFT: {
      // If the argument in rdx is already an object, we skip the
      // allocation of a heap number.
      __ JumpIfNotSmi(rdx, &skip_allocation);
      // Allocate a heap number for the result. Keep rax and rdx intact
      // for the possible runtime call.
      __ AllocateHeapNumber(rbx, rcx, alloc_failure);
      // Now rdx can be overwritten losing one of the arguments as we are
      // now done and will not need it any more.
      __ movq(rdx, rbx);
      __ bind(&skip_allocation);
      // Use object in rdx as a result holder.
      __ movq(rax, rdx);
      break;
    }
    case OVERWRITE_RIGHT:
      // If the argument in rax is already an object, we skip the
      // allocation of a heap number.
      __ JumpIfNotSmi(rax, &skip_allocation);
      // Fall through!
    case NO_OVERWRITE:
      // Allocate a heap number for the result. Keep rax and rdx intact
      // for the possible runtime call.
      __ AllocateHeapNumber(rbx, rcx, alloc_failure);
      // Now rax can be overwritten losing one of the arguments as we are
      // now done and will not need it any more.
      __ movq(rax, rbx);
      __ bind(&skip_allocation);
      break;
    default: UNREACHABLE();
  }
}


void TypeRecordingBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
  __ pop(rcx);
  __ push(rdx);
  __ push(rax);
  __ push(rcx);
}

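// Each transcendental cache entry holds the 64 bits of an input double as
// two uint32 words plus a pointer to the cached result, i.e. roughly:
//
//   struct Element {
//     uint32_t in[2];  // Bits of the input double.
//     Object* output;  // Cached result heap number.
//   };  // 16 bytes per element, as verified by the DEBUG block below.
//
// The stub hashes the input bits to index the cache and goes through the
// runtime on a cache miss that fails to allocate, or when the cache for
// type_ has not been initialized yet.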
void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
  // Input on stack:
  // rsp[8]: argument (should be number).
  // rsp[0]: return address.
  Label runtime_call;
  Label runtime_call_clear_stack;
  Label input_not_smi;
  NearLabel loaded;
  // Test that rax is a number.
  __ movq(rax, Operand(rsp, kPointerSize));
  __ JumpIfNotSmi(rax, &input_not_smi);
  // Input is a smi. Untag and load it onto the FPU stack.
  // Then load the bits of the double into rbx.
  __ SmiToInteger32(rax, rax);
  __ subq(rsp, Immediate(kPointerSize));
  __ cvtlsi2sd(xmm1, rax);
  __ movsd(Operand(rsp, 0), xmm1);
  __ movq(rbx, xmm1);
  __ movq(rdx, xmm1);
  __ fld_d(Operand(rsp, 0));
  __ addq(rsp, Immediate(kPointerSize));
  __ jmp(&loaded);

  __ bind(&input_not_smi);
  // Check if input is a HeapNumber.
  __ Move(rbx, Factory::heap_number_map());
  __ cmpq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
  __ j(not_equal, &runtime_call);
  // Input is a HeapNumber. Push it on the FPU stack and load its
  // bits into rbx.
  __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset));
  __ movq(rbx, FieldOperand(rax, HeapNumber::kValueOffset));
  __ movq(rdx, rbx);
  __ bind(&loaded);
  // ST[0] == double value
  // rbx = bits of double value.
  // rdx = also bits of double value.
  // Compute hash (h is 32 bits, bits are 64 and the shifts are arithmetic):
  //   h = h0 = bits ^ (bits >> 32);
  //   h ^= h >> 16;
  //   h ^= h >> 8;
  //   h = h & (cacheSize - 1);
  // or h = (h0 ^ (h0 >> 8) ^ (h0 >> 16) ^ (h0 >> 24)) & (cacheSize - 1)
  __ sar(rdx, Immediate(32));
  __ xorl(rdx, rbx);
  __ movl(rcx, rdx);
  __ movl(rax, rdx);
  __ movl(rdi, rdx);
  __ sarl(rdx, Immediate(8));
  __ sarl(rcx, Immediate(16));
  __ sarl(rax, Immediate(24));
  __ xorl(rcx, rdx);
  __ xorl(rax, rdi);
  __ xorl(rcx, rax);
  ASSERT(IsPowerOf2(TranscendentalCache::kCacheSize));
  __ andl(rcx, Immediate(TranscendentalCache::kCacheSize - 1));

  // ST[0] == double value.
  // rbx = bits of double value.
  // rcx = TranscendentalCache::hash(double value).
  __ movq(rax, ExternalReference::transcendental_cache_array_address());
  // rax points to cache array.
  __ movq(rax, Operand(rax, type_ * sizeof(TranscendentalCache::caches_[0])));
  // rax points to the cache for the type type_.
  // If NULL, the cache hasn't been initialized yet, so go through runtime.
  __ testq(rax, rax);
  __ j(zero, &runtime_call_clear_stack);
#ifdef DEBUG
  // Check that the layout of cache elements match expectations.
  {  // NOLINT - doesn't like a single brace on a line.
    TranscendentalCache::Element test_elem[2];
    char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
    char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
    char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
    char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
    char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
    // Two uint_32's and a pointer per element.
    CHECK_EQ(16, static_cast<int>(elem2_start - elem_start));
    CHECK_EQ(0, static_cast<int>(elem_in0 - elem_start));
    CHECK_EQ(kIntSize, static_cast<int>(elem_in1 - elem_start));
    CHECK_EQ(2 * kIntSize, static_cast<int>(elem_out - elem_start));
  }
#endif
  // Find the address of the rcx'th entry in the cache, i.e., &rax[rcx*16].
  __ addl(rcx, rcx);
  __ lea(rcx, Operand(rax, rcx, times_8, 0));
  // Check if cache matches: Double value is stored in uint32_t[2] array.
  NearLabel cache_miss;
  __ cmpq(rbx, Operand(rcx, 0));
  __ j(not_equal, &cache_miss);
  // Cache hit!
  __ movq(rax, Operand(rcx, 2 * kIntSize));
  __ fstp(0);  // Clear FPU stack.
  __ ret(kPointerSize);

  __ bind(&cache_miss);
  // Update cache with new value.
  Label nan_result;
  GenerateOperation(masm, &nan_result);
  __ AllocateHeapNumber(rax, rdi, &runtime_call_clear_stack);
  __ movq(Operand(rcx, 0), rbx);
  __ movq(Operand(rcx, 2 * kIntSize), rax);
  __ fstp_d(FieldOperand(rax, HeapNumber::kValueOffset));
  __ ret(kPointerSize);

  __ bind(&runtime_call_clear_stack);
  __ fstp(0);
  __ bind(&runtime_call);
  __ TailCallExternalReference(ExternalReference(RuntimeFunction()), 1, 1);

  __ bind(&nan_result);
  __ fstp(0);  // Remove argument from FPU stack.
  __ LoadRoot(rax, Heap::kNanValueRootIndex);
  __ movq(Operand(rcx, 0), rbx);
  __ movq(Operand(rcx, 2 * kIntSize), rax);
  __ ret(kPointerSize);
}


Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
  switch (type_) {
    // Add more cases when necessary.
    case TranscendentalCache::SIN: return Runtime::kMath_sin;
    case TranscendentalCache::COS: return Runtime::kMath_cos;
    case TranscendentalCache::LOG: return Runtime::kMath_log;
    default:
      UNIMPLEMENTED();
      return Runtime::kAbort;
  }
}

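// fsin and fcos are only defined for arguments of magnitude below 2^63
// and return NaN for infinities and NaN, which is why GenerateOperation
// compares the (biased) exponent against 63 + kExponentBias and reduces
// larger finite arguments with fprem1 before taking the sine or cosine.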
1659void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm,
1660 Label* on_nan_result) {
1661 // Registers:
1662 // rbx: Bits of input double. Must be preserved.
1663 // rcx: Pointer to cache entry. Must be preserved.
1664 // st(0): Input double
1665 Label done;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001666 if (type_ == TranscendentalCache::SIN || type_ == TranscendentalCache::COS) {
1667 // Both fsin and fcos require arguments in the range +/-2^63 and
1668 // return NaN for infinities and NaN. They can share all code except
1669 // the actual fsin/fcos operation.
1670 Label in_range;
1671 // If argument is outside the range -2^63..2^63, fsin/cos doesn't
1672 // work. We must reduce it to the appropriate range.
1673 __ movq(rdi, rbx);
1674 // Move exponent and sign bits to low bits.
1675 __ shr(rdi, Immediate(HeapNumber::kMantissaBits));
1676 // Remove sign bit.
1677 __ andl(rdi, Immediate((1 << HeapNumber::kExponentBits) - 1));
1678 int supported_exponent_limit = (63 + HeapNumber::kExponentBias);
1679 __ cmpl(rdi, Immediate(supported_exponent_limit));
1680 __ j(below, &in_range);
1681 // Check for infinity and NaN. Both return NaN for sin.
1682 __ cmpl(rdi, Immediate(0x7ff));
1683 __ j(equal, on_nan_result);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001684
Ben Murdochb0fe1622011-05-05 13:52:32 +01001685 // Use fpmod to restrict argument to the range +/-2*PI.
1686 __ fldpi();
1687 __ fadd(0);
1688 __ fld(1);
1689 // FPU Stack: input, 2*pi, input.
1690 {
1691 Label no_exceptions;
1692 __ fwait();
1693 __ fnstsw_ax();
1694 // Clear if Illegal Operand or Zero Division exceptions are set.
1695 __ testl(rax, Immediate(5)); // #IO and #ZD flags of FPU status word.
1696 __ j(zero, &no_exceptions);
1697 __ fnclex();
1698 __ bind(&no_exceptions);
1699 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001700
Ben Murdochb0fe1622011-05-05 13:52:32 +01001701 // Compute st(0) % st(1)
1702 {
1703 NearLabel partial_remainder_loop;
1704 __ bind(&partial_remainder_loop);
1705 __ fprem1();
1706 __ fwait();
1707 __ fnstsw_ax();
1708 __ testl(rax, Immediate(0x400)); // Check C2 bit of FPU status word.
1709 // If C2 is set, computation only has partial result. Loop to
1710 // continue computation.
1711 __ j(not_zero, &partial_remainder_loop);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001712 }
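    // fprem1 computes only a partial IEEE remainder: one execution reduces
    // the exponent difference between st(0) and st(1) by at most 63, and it
    // reports an incomplete reduction by setting the C2 flag in the status
    // word, hence the loop above keeps retrying until C2 is clear.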
    // FPU Stack: input, 2*pi, input % 2*pi
    __ fstp(2);
    // FPU Stack: input % 2*pi, 2*pi,
    __ fstp(0);
    // FPU Stack: input % 2*pi
    __ bind(&in_range);
    switch (type_) {
      case TranscendentalCache::SIN:
        __ fsin();
        break;
      case TranscendentalCache::COS:
        __ fcos();
        break;
      default:
        UNREACHABLE();
    }
    __ bind(&done);
  } else {
    ASSERT(type_ == TranscendentalCache::LOG);
    __ fldln2();
    __ fxch();
    __ fyl2x();
  }
}


// Get the integer part of a heap number.
// Overwrites the contents of rdi, rbx and rcx. Result cannot be rdi or rbx.
void IntegerConvert(MacroAssembler* masm,
                    Register result,
                    Register source) {
  // Result may be rcx. If result and source are the same register, source will
  // be overwritten.
  ASSERT(!result.is(rdi) && !result.is(rbx));
  // TODO(lrn): When type info reaches here, if value is a 32-bit integer, use
  // cvttsd2si (32-bit version) directly.
  Register double_exponent = rbx;
  Register double_value = rdi;
  NearLabel done, exponent_63_plus;
  // Get double and extract exponent.
  __ movq(double_value, FieldOperand(source, HeapNumber::kValueOffset));
  // Clear result preemptively, in case we need to return zero.
  __ xorl(result, result);
  __ movq(xmm0, double_value);  // Save copy in xmm0 in case we need it there.
  // Double the value to remove the sign bit, shift the exponent down to the
  // least significant bits, and subtract the bias to get the unshifted,
  // unbiased exponent.
  __ lea(double_exponent, Operand(double_value, double_value, times_1, 0));
  __ shr(double_exponent, Immediate(64 - HeapNumber::kExponentBits));
  __ subl(double_exponent, Immediate(HeapNumber::kExponentBias));
  // Check whether the exponent is too big for a 63 bit unsigned integer.
  __ cmpl(double_exponent, Immediate(63));
  __ j(above_equal, &exponent_63_plus);
  // Handle exponent range 0..62.
  __ cvttsd2siq(result, xmm0);
  __ jmp(&done);

  __ bind(&exponent_63_plus);
  // Exponent negative or 63+.
  __ cmpl(double_exponent, Immediate(83));
  // If the exponent is negative or above 83, the number contains no
  // significant bits in the range 0..2^31, so the result is zero; the result
  // register was cleared preemptively above.
  __ j(above, &done);

  // Exponent in range 63..83.
  // Mantissa * 2^exponent contains bits in the range 2^0..2^31, namely
  // the least significant exponent-52 bits.

  // Negate low bits of mantissa if value is negative.
  __ addq(double_value, double_value);  // Move sign bit to carry.
  __ sbbl(result, result);  // And convert carry to -1 in result register.
  // If the value is negative, do (double_value-1)^-1, otherwise
  // (double_value-0)^0.
  __ addl(double_value, result);
  // Do xor in opposite directions depending on where we want the result
  // (depending on whether result is rcx or not).

  if (result.is(rcx)) {
    __ xorl(double_value, result);
    // Left shift mantissa by (exponent - mantissabits - 1) to save the
    // bits that have positional values below 2^32 (the extra -1 comes from the
    // doubling done above to move the sign bit into the carry flag).
    __ leal(rcx, Operand(double_exponent, -HeapNumber::kMantissaBits - 1));
    __ shll_cl(double_value);
    __ movl(result, double_value);
  } else {
    // As in the then-branch, but move double_value to result before shifting.
    __ xorl(result, double_value);
    __ leal(rcx, Operand(double_exponent, -HeapNumber::kMantissaBits - 1));
    __ shll_cl(result);
  }

  __ bind(&done);
}
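
// A rough scalar model of IntegerConvert (illustrative only, not part of
// the build): for a double with sign s, unbiased exponent e and 52-bit
// mantissa m,
//   e in 0..62   =>  result = cvttsd2siq(value), truncation toward zero;
//   e in 63..83  =>  result = low 32 bits of (m << (e - 52)), negated when
//                    s is set via the branchless identity -x == (x - 1) ^ -1
//                    (the implicit leading 1 bit would land at position
//                    e >= 63, outside the low 32 bits, so it is ignored);
//   otherwise    =>  result = 0, since no bit has positional value below
//                    2^32.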


// Input: rdx, rax are the left and right objects of a bit op.
// Output: rax, rcx are left and right integers for a bit op.
void FloatingPointHelper::LoadNumbersAsIntegers(MacroAssembler* masm) {
  // Check float operands.
  Label done;
  Label rax_is_smi;
  Label rax_is_object;
  Label rdx_is_object;

  __ JumpIfNotSmi(rdx, &rdx_is_object);
  __ SmiToInteger32(rdx, rdx);
  __ JumpIfSmi(rax, &rax_is_smi);

  __ bind(&rax_is_object);
  IntegerConvert(masm, rcx, rax);  // Uses rdi, rcx and rbx.
  __ jmp(&done);

  __ bind(&rdx_is_object);
  IntegerConvert(masm, rdx, rdx);  // Uses rdi, rcx and rbx.
  __ JumpIfNotSmi(rax, &rax_is_object);
  __ bind(&rax_is_smi);
  __ SmiToInteger32(rcx, rax);

  __ bind(&done);
  __ movl(rax, rdx);
}


// Input: rdx, rax are the left and right objects of a bit op.
// Output: rax, rcx are left and right integers for a bit op.
// Jump to conversion_failure: rdx and rax are unchanged.
void FloatingPointHelper::LoadAsIntegers(MacroAssembler* masm,
                                         Label* conversion_failure,
                                         Register heap_number_map) {
  // Check float operands.
  Label arg1_is_object, check_undefined_arg1;
  Label arg2_is_object, check_undefined_arg2;
  Label load_arg2, done;

  __ JumpIfNotSmi(rdx, &arg1_is_object);
  __ SmiToInteger32(r8, rdx);
  __ jmp(&load_arg2);

  // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
  __ bind(&check_undefined_arg1);
  __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, conversion_failure);
  __ movl(r8, Immediate(0));
  __ jmp(&load_arg2);

  __ bind(&arg1_is_object);
  __ cmpq(FieldOperand(rdx, HeapObject::kMapOffset), heap_number_map);
  __ j(not_equal, &check_undefined_arg1);
  // Get the untagged integer version of the rdx heap number in r8.
  IntegerConvert(masm, r8, rdx);

  // Here r8 has the untagged integer, rax has a Smi or a heap number.
  __ bind(&load_arg2);
  // Test if arg2 is a Smi.
  __ JumpIfNotSmi(rax, &arg2_is_object);
  __ SmiToInteger32(rcx, rax);
  __ jmp(&done);

  // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
  __ bind(&check_undefined_arg2);
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, conversion_failure);
  __ movl(rcx, Immediate(0));
  __ jmp(&done);

  __ bind(&arg2_is_object);
  __ cmpq(FieldOperand(rax, HeapObject::kMapOffset), heap_number_map);
  __ j(not_equal, &check_undefined_arg2);
  // Get the untagged integer version of the rax heap number in rcx.
  IntegerConvert(masm, rcx, rax);
  __ bind(&done);
  __ movl(rax, r8);
}
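
// Example conversions for LoadAsIntegers above (my reading of the
// ECMA-262 ToInt32-style behavior it implements):
//   undefined      -> 0   (the check_undefined paths)
//   2.9            -> 2   (truncation toward zero)
//   4294967297.0   -> 1   (only the low 32 bits survive)
// Anything that is neither a smi, a heap number, nor undefined jumps to
// conversion_failure.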


void FloatingPointHelper::LoadSSE2SmiOperands(MacroAssembler* masm) {
  __ SmiToInteger32(kScratchRegister, rdx);
  __ cvtlsi2sd(xmm0, kScratchRegister);
  __ SmiToInteger32(kScratchRegister, rax);
  __ cvtlsi2sd(xmm1, kScratchRegister);
}
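
// Note on the smi loads above: in this x64 port a smi keeps its 32-bit
// payload in the upper half of the 64-bit word, so SmiToInteger32 is
// essentially one shift right; cvtlsi2sd then widens the untagged 32-bit
// value to a double, which is always exact.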


void FloatingPointHelper::LoadSSE2NumberOperands(MacroAssembler* masm) {
  Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, done;
  // Load operand in rdx into xmm0.
  __ JumpIfSmi(rdx, &load_smi_rdx);
  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  // Load operand in rax into xmm1.
  __ JumpIfSmi(rax, &load_smi_rax);
  __ bind(&load_nonsmi_rax);
  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi_rdx);
  __ SmiToInteger32(kScratchRegister, rdx);
  __ cvtlsi2sd(xmm0, kScratchRegister);
  __ JumpIfNotSmi(rax, &load_nonsmi_rax);

  __ bind(&load_smi_rax);
  __ SmiToInteger32(kScratchRegister, rax);
  __ cvtlsi2sd(xmm1, kScratchRegister);

  __ bind(&done);
}


void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm,
                                                  Label* not_numbers) {
  Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done;
  // Load operand in rdx into xmm0, or branch to not_numbers.
  __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex);
  __ JumpIfSmi(rdx, &load_smi_rdx);
  __ cmpq(FieldOperand(rdx, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);  // Argument in rdx is not a number.
  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  // Load operand in rax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(rax, &load_smi_rax);

  __ bind(&load_nonsmi_rax);
  __ cmpq(FieldOperand(rax, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);
  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi_rdx);
  __ SmiToInteger32(kScratchRegister, rdx);
  __ cvtlsi2sd(xmm0, kScratchRegister);
  __ JumpIfNotSmi(rax, &load_nonsmi_rax);

  __ bind(&load_smi_rax);
  __ SmiToInteger32(kScratchRegister, rax);
  __ cvtlsi2sd(xmm1, kScratchRegister);
  __ bind(&done);
}


void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
  Label slow, done;

  if (op_ == Token::SUB) {
    if (include_smi_code_) {
      // Check whether the value is a smi.
      Label try_float;
      __ JumpIfNotSmi(rax, &try_float);
      if (negative_zero_ == kIgnoreNegativeZero) {
        __ SmiCompare(rax, Smi::FromInt(0));
        __ j(equal, &done);
      }
      __ SmiNeg(rax, rax, &done);
      __ jmp(&slow);  // zero, if not handled above, and Smi::kMinValue.

      // Try floating point case.
      __ bind(&try_float);
    } else if (FLAG_debug_code) {
      __ AbortIfSmi(rax);
    }

    __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
    __ CompareRoot(rdx, Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &slow);
    // Operand is a float, negate its value by flipping sign bit.
    __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset));
    __ movq(kScratchRegister, Immediate(0x01));
    __ shl(kScratchRegister, Immediate(63));
    __ xor_(rdx, kScratchRegister);  // Flip sign.
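    // IEEE 754 doubles negate by toggling bit 63 alone; for example 1.0
    // (0x3FF0000000000000) becomes -1.0 (0xBFF0000000000000). No arithmetic
    // is involved, so NaNs and infinities need no special casing here.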
    // rdx is value to store.
    if (overwrite_ == UNARY_OVERWRITE) {
      __ movq(FieldOperand(rax, HeapNumber::kValueOffset), rdx);
    } else {
      __ AllocateHeapNumber(rcx, rbx, &slow);
      // rcx: allocated 'empty' number
      __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx);
      __ movq(rax, rcx);
    }
  } else if (op_ == Token::BIT_NOT) {
    if (include_smi_code_) {
      Label try_float;
      __ JumpIfNotSmi(rax, &try_float);
      __ SmiNot(rax, rax);
      __ jmp(&done);
      // Try floating point case.
      __ bind(&try_float);
    } else if (FLAG_debug_code) {
      __ AbortIfSmi(rax);
    }

    // Check if the operand is a heap number.
    __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
    __ CompareRoot(rdx, Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &slow);

    // Convert the heap number in rax to an untagged integer in rax.
    IntegerConvert(masm, rax, rax);

    // Do the bitwise operation and smi tag the result.
    __ notl(rax);
    __ Integer32ToSmi(rax, rax);
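    // Example: ~5.0 converts to the int32 5 and yields the smi -6. Every
    // int32 fits in an x64 smi (32-bit payload), so no overflow check is
    // needed after Integer32ToSmi.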
  }

  // Return from the stub.
  __ bind(&done);
  __ StubReturn(1);

  // Handle the slow case by jumping to the JavaScript builtin.
  __ bind(&slow);
  __ pop(rcx);  // pop return address
  __ push(rax);
  __ push(rcx);  // push return address
  switch (op_) {
    case Token::SUB:
      __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
      break;
    case Token::BIT_NOT:
      __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}


void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  // The key is in rdx and the parameter count is in rax.

  // The displacement is used for skipping the frame pointer on the
  // stack. It is the offset of the last parameter (if any) relative
  // to the frame pointer.
  static const int kDisplacement = 1 * kPointerSize;

  // Check that the key is a smi.
  Label slow;
  __ JumpIfNotSmi(rdx, &slow);

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor;
  __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ SmiCompare(Operand(rbx, StandardFrameConstants::kContextOffset),
                Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor);

  // Check index against formal parameters count limit passed in
  // through register rax. Use unsigned comparison to get negative
  // check for free.
  __ cmpq(rdx, rax);
  __ j(above_equal, &slow);

  // Read the argument from the stack and return it.
  SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
  __ lea(rbx, Operand(rbp, index.reg, index.scale, 0));
  index = masm->SmiToNegativeIndex(rdx, rdx, kPointerSizeLog2);
  __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement));
  __ Ret();

  // Arguments adaptor case: Check index against actual arguments
  // limit found in the arguments adaptor frame. Use unsigned
  // comparison to get negative check for free.
  __ bind(&adaptor);
  __ movq(rcx, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ cmpq(rdx, rcx);
  __ j(above_equal, &slow);

  // Read the argument from the stack and return it.
  index = masm->SmiToIndex(rax, rcx, kPointerSizeLog2);
  __ lea(rbx, Operand(rbx, index.reg, index.scale, 0));
  index = masm->SmiToNegativeIndex(rdx, rdx, kPointerSizeLog2);
  __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement));
  __ Ret();

  // Slow-case: Handle non-smi or out-of-bounds access to arguments
  // by calling the runtime system.
  __ bind(&slow);
  __ pop(rbx);  // Return address.
  __ push(rdx);
  __ push(rbx);
  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
}


void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
  // rsp[0] : return address
  // rsp[8] : number of parameters
  // rsp[16] : receiver displacement
  // rsp[24] : function

  // The displacement is used for skipping the return address and the
  // frame pointer on the stack. It is the offset of the last
  // parameter (if any) relative to the frame pointer.
  static const int kDisplacement = 2 * kPointerSize;

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset),
                Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor_frame);

  // Get the length from the frame.
  __ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize));
  __ jmp(&try_allocate);

  // Patch the arguments.length and the parameters pointer.
  __ bind(&adaptor_frame);
  __ SmiToInteger32(rcx,
                    Operand(rdx,
                            ArgumentsAdaptorFrameConstants::kLengthOffset));
  // Space on stack must already hold a smi.
  __ Integer32ToSmiField(Operand(rsp, 1 * kPointerSize), rcx);
  // Do not clobber the length index for the indexing operation since
  // it is used to compute the size for allocation later.
  __ lea(rdx, Operand(rdx, rcx, times_pointer_size, kDisplacement));
  __ movq(Operand(rsp, 2 * kPointerSize), rdx);

  // Try the new space allocation. Start out with computing the size of
  // the arguments object and the elements array.
  Label add_arguments_object;
  __ bind(&try_allocate);
  __ testl(rcx, rcx);
  __ j(zero, &add_arguments_object);
  __ leal(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize));
  __ bind(&add_arguments_object);
  __ addl(rcx, Immediate(Heap::kArgumentsObjectSize));

  // Do the allocation of both objects in one go.
  __ AllocateInNewSpace(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT);

  // Get the arguments boilerplate from the current (global) context.
  int offset = Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX);
  __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset));
  __ movq(rdi, Operand(rdi, offset));

  // Copy the JS object part.
  STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
  __ movq(kScratchRegister, FieldOperand(rdi, 0 * kPointerSize));
  __ movq(rdx, FieldOperand(rdi, 1 * kPointerSize));
  __ movq(rbx, FieldOperand(rdi, 2 * kPointerSize));
  __ movq(FieldOperand(rax, 0 * kPointerSize), kScratchRegister);
  __ movq(FieldOperand(rax, 1 * kPointerSize), rdx);
  __ movq(FieldOperand(rax, 2 * kPointerSize), rbx);

  // Setup the callee in-object property.
  ASSERT(Heap::arguments_callee_index == 0);
  __ movq(kScratchRegister, Operand(rsp, 3 * kPointerSize));
  __ movq(FieldOperand(rax, JSObject::kHeaderSize), kScratchRegister);

  // Get the length (smi tagged) and set that as an in-object property too.
  ASSERT(Heap::arguments_length_index == 1);
  __ movq(rcx, Operand(rsp, 1 * kPointerSize));
  __ movq(FieldOperand(rax, JSObject::kHeaderSize + kPointerSize), rcx);

  // If there are no actual arguments, we're done.
  Label done;
  __ SmiTest(rcx);
  __ j(zero, &done);

  // Get the parameters pointer from the stack and untag the length.
  __ movq(rdx, Operand(rsp, 2 * kPointerSize));

  // Setup the elements pointer in the allocated arguments object and
  // initialize the header in the elements fixed array.
  __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSize));
  __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi);
  __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
  __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
  __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
  __ SmiToInteger32(rcx, rcx);  // Untag length for the loop below.

  // Copy the fixed array slots.
  Label loop;
  __ bind(&loop);
  __ movq(kScratchRegister, Operand(rdx, -1 * kPointerSize));  // Skip receiver.
  __ movq(FieldOperand(rdi, FixedArray::kHeaderSize), kScratchRegister);
  __ addq(rdi, Immediate(kPointerSize));
  __ subq(rdx, Immediate(kPointerSize));
  __ decl(rcx);
  __ j(not_zero, &loop);

  // Return and remove the on-stack parameters.
  __ bind(&done);
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
}


void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Just jump directly to the runtime if native RegExp is not selected at
  // compile time, or if the regexp entry in generated code is turned off by
  // the runtime switch.
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
#else  // V8_INTERPRETED_REGEXP
  if (!FLAG_regexp_entry_native) {
    __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
    return;
  }

  // Stack frame on entry.
  //  rsp[0]: return address
  //  rsp[8]: last_match_info (expected JSArray)
  //  rsp[16]: previous index
  //  rsp[24]: subject string
  //  rsp[32]: JSRegExp object

  static const int kLastMatchInfoOffset = 1 * kPointerSize;
  static const int kPreviousIndexOffset = 2 * kPointerSize;
  static const int kSubjectOffset = 3 * kPointerSize;
  static const int kJSRegExpOffset = 4 * kPointerSize;

  Label runtime;

  // Ensure that a RegExp stack is allocated.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address();
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size();
  __ movq(kScratchRegister, address_of_regexp_stack_memory_size);
  __ movq(kScratchRegister, Operand(kScratchRegister, 0));
  __ testq(kScratchRegister, kScratchRegister);
  __ j(zero, &runtime);

  // Check that the first argument is a JSRegExp object.
  __ movq(rax, Operand(rsp, kJSRegExpOffset));
  __ JumpIfSmi(rax, &runtime);
  __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister);
  __ j(not_equal, &runtime);
  // Check that the RegExp has been compiled (data contains a fixed array).
  __ movq(rcx, FieldOperand(rax, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    Condition is_smi = masm->CheckSmi(rcx);
    __ Check(NegateCondition(is_smi),
             "Unexpected type for RegExp data, FixedArray expected");
    __ CmpObjectType(rcx, FIXED_ARRAY_TYPE, kScratchRegister);
    __ Check(equal, "Unexpected type for RegExp data, FixedArray expected");
  }

  // rcx: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
  __ SmiToInteger32(rbx, FieldOperand(rcx, JSRegExp::kDataTagOffset));
  __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP));
  __ j(not_equal, &runtime);

  // rcx: RegExp data (FixedArray)
  // Check that the number of captures fits in the static offsets vector
  // buffer.
  __ SmiToInteger32(rdx,
                    FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2.
  __ leal(rdx, Operand(rdx, rdx, times_1, 2));
  // Check that the static offsets vector buffer is large enough.
  __ cmpl(rdx, Immediate(OffsetsVector::kStaticOffsetsVectorSize));
  __ j(above, &runtime);

  // rcx: RegExp data (FixedArray)
  // rdx: Number of capture registers
  // Check that the second argument is a string.
  __ movq(rax, Operand(rsp, kSubjectOffset));
  __ JumpIfSmi(rax, &runtime);
  Condition is_string = masm->IsObjectStringType(rax, rbx, rbx);
  __ j(NegateCondition(is_string), &runtime);

  // rax: Subject string.
  // rcx: RegExp data (FixedArray).
  // rdx: Number of capture registers.
  // Check that the third argument is a positive smi less than the string
  // length. A negative value will be greater (unsigned comparison).
  __ movq(rbx, Operand(rsp, kPreviousIndexOffset));
  __ JumpIfNotSmi(rbx, &runtime);
  __ SmiCompare(rbx, FieldOperand(rax, String::kLengthOffset));
  __ j(above_equal, &runtime);

  // rcx: RegExp data (FixedArray)
  // rdx: Number of capture registers
  // Check that the fourth object is a JSArray object.
  __ movq(rax, Operand(rsp, kLastMatchInfoOffset));
  __ JumpIfSmi(rax, &runtime);
  __ CmpObjectType(rax, JS_ARRAY_TYPE, kScratchRegister);
  __ j(not_equal, &runtime);
  // Check that the JSArray is in fast case.
  __ movq(rbx, FieldOperand(rax, JSArray::kElementsOffset));
  __ movq(rax, FieldOperand(rbx, HeapObject::kMapOffset));
  __ Cmp(rax, Factory::fixed_array_map());
  __ j(not_equal, &runtime);
  // Check that the last match info has space for the capture registers and the
  // additional information. Ensure no overflow in add.
  STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset);
  __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset));
  __ addl(rdx, Immediate(RegExpImpl::kLastMatchOverhead));
  __ cmpl(rdx, rax);
  __ j(greater, &runtime);

  // rcx: RegExp data (FixedArray)
  // Check the representation and encoding of the subject string.
  NearLabel seq_ascii_string, seq_two_byte_string, check_code;
  __ movq(rax, Operand(rsp, kSubjectOffset));
  __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
  // First check for flat two byte string.
  __ andb(rbx, Immediate(
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask));
  STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);
  // Any other flat string must be a flat ascii string.
  __ testb(rbx, Immediate(kIsNotStringMask | kStringRepresentationMask));
  __ j(zero, &seq_ascii_string);

  // Check for flat cons string.
  // A flat cons string is a cons string where the second part is the empty
  // string. In that case the subject string is just the first part of the cons
  // string. Also in this case the first part of the cons string is known to be
  // a sequential string or an external string.
  STATIC_ASSERT(kExternalStringTag != 0);
  STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0);
  __ testb(rbx, Immediate(kIsNotStringMask | kExternalStringTag));
  __ j(not_zero, &runtime);
  // String is a cons string.
  __ movq(rdx, FieldOperand(rax, ConsString::kSecondOffset));
  __ Cmp(rdx, Factory::empty_string());
  __ j(not_equal, &runtime);
  __ movq(rax, FieldOperand(rax, ConsString::kFirstOffset));
  __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
  // String is a cons string with empty second part.
  // rax: first part of cons string.
  // rbx: map of first part of cons string.
  // Is first part a flat two byte string?
  __ testb(FieldOperand(rbx, Map::kInstanceTypeOffset),
           Immediate(kStringRepresentationMask | kStringEncodingMask));
  STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);
  // Any other flat string must be ascii.
  __ testb(FieldOperand(rbx, Map::kInstanceTypeOffset),
           Immediate(kStringRepresentationMask));
  __ j(not_zero, &runtime);

  __ bind(&seq_ascii_string);
  // rax: subject string (sequential ascii)
  // rcx: RegExp data (FixedArray)
  __ movq(r11, FieldOperand(rcx, JSRegExp::kDataAsciiCodeOffset));
  __ Set(rdi, 1);  // Type is ascii.
  __ jmp(&check_code);

  __ bind(&seq_two_byte_string);
  // rax: subject string (flat two-byte)
  // rcx: RegExp data (FixedArray)
  __ movq(r11, FieldOperand(rcx, JSRegExp::kDataUC16CodeOffset));
  __ Set(rdi, 0);  // Type is two byte.

  __ bind(&check_code);
  // Check that the irregexp code has been generated for the actual string
  // encoding. If it has, the field contains a code object; otherwise it
  // contains the hole.
  __ CmpObjectType(r11, CODE_TYPE, kScratchRegister);
  __ j(not_equal, &runtime);

  // rax: subject string
  // rdi: encoding of subject string (1 if ascii, 0 if two_byte);
  // r11: code
  // Load used arguments before starting to push arguments for call to native
  // RegExp code to avoid handling changing stack height.
  __ SmiToInteger64(rbx, Operand(rsp, kPreviousIndexOffset));

  // rax: subject string
  // rbx: previous index
  // rdi: encoding of subject string (1 if ascii 0 if two_byte);
  // r11: code
  // All checks done. Now push arguments for native regexp code.
  __ IncrementCounter(&Counters::regexp_entry_native, 1);

  // rsi is caller save on Windows and used to pass a parameter on Linux.
  __ push(rsi);

  static const int kRegExpExecuteArguments = 7;
  __ PrepareCallCFunction(kRegExpExecuteArguments);
  int argument_slots_on_stack =
      masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);

  // Argument 7: Indicate that this is a direct call from JavaScript.
  __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize),
          Immediate(1));

  // Argument 6: Start (high end) of backtracking stack memory area.
  __ movq(kScratchRegister, address_of_regexp_stack_memory_address);
  __ movq(r9, Operand(kScratchRegister, 0));
  __ movq(kScratchRegister, address_of_regexp_stack_memory_size);
  __ addq(r9, Operand(kScratchRegister, 0));
  // Argument 6 passed in r9 on Linux and on the stack on Windows.
#ifdef _WIN64
  __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kPointerSize), r9);
#endif

  // Argument 5: static offsets vector buffer.
  __ movq(r8, ExternalReference::address_of_static_offsets_vector());
  // Argument 5 passed in r8 on Linux and on the stack on Windows.
#ifdef _WIN64
  __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kPointerSize), r8);
#endif

  // First four arguments are passed in registers on both Linux and Windows.
#ifdef _WIN64
  Register arg4 = r9;
  Register arg3 = r8;
  Register arg2 = rdx;
  Register arg1 = rcx;
#else
  Register arg4 = rcx;
  Register arg3 = rdx;
  Register arg2 = rsi;
  Register arg1 = rdi;
#endif
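
  // The #ifdef above mirrors the two x64 C calling conventions (my summary,
  // not asserted by this file): the System V AMD64 ABI used on Linux and Mac
  // passes the first integer arguments in rdi, rsi, rdx, rcx, r8, r9, while
  // the Windows x64 ABI passes the first four in rcx, rdx, r8, r9 and expects
  // stack slots for arguments beyond that (set up above by
  // PrepareCallCFunction).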

  // Keep track of aliasing between argX defined above and the registers used.
  // rax: subject string
  // rbx: previous index
  // rdi: encoding of subject string (1 if ascii 0 if two_byte);
  // r11: code

  // Argument 4: End of string data
  // Argument 3: Start of string data
  NearLabel setup_two_byte, setup_rest;
  __ testb(rdi, rdi);
  __ j(zero, &setup_two_byte);
  __ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset));
  __ lea(arg4, FieldOperand(rax, rdi, times_1, SeqAsciiString::kHeaderSize));
  __ lea(arg3, FieldOperand(rax, rbx, times_1, SeqAsciiString::kHeaderSize));
  __ jmp(&setup_rest);
  __ bind(&setup_two_byte);
  __ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset));
  __ lea(arg4, FieldOperand(rax, rdi, times_2, SeqTwoByteString::kHeaderSize));
  __ lea(arg3, FieldOperand(rax, rbx, times_2, SeqTwoByteString::kHeaderSize));

  __ bind(&setup_rest);
  // Argument 2: Previous index.
  __ movq(arg2, rbx);

  // Argument 1: Subject string.
  __ movq(arg1, rax);

  // Locate the code entry and call it.
  __ addq(r11, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ CallCFunction(r11, kRegExpExecuteArguments);

  // rsi is caller save, as it is used to pass a parameter.
  __ pop(rsi);

  // Check the result.
  NearLabel success;
  __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::SUCCESS));
  __ j(equal, &success);
  NearLabel failure;
  __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE));
  __ j(equal, &failure);
  __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION));
  // If not exception it can only be retry. Handle that in the runtime system.
  __ j(not_equal, &runtime);
  // Result must now be exception. If there is no pending exception already, a
  // stack overflow (on the backtrack stack) was detected in RegExp code, but
  // the exception hasn't been created yet. Handle that in the runtime system.
  // TODO(592): Rerunning the RegExp to get the stack overflow exception.
  ExternalReference pending_exception_address(Top::k_pending_exception_address);
  __ movq(kScratchRegister, pending_exception_address);
  __ movq(kScratchRegister, Operand(kScratchRegister, 0));
  __ Cmp(kScratchRegister, Factory::the_hole_value());
  __ j(equal, &runtime);
  __ bind(&failure);
  // For failure and exception return null.
  __ Move(rax, Factory::null_value());
  __ ret(4 * kPointerSize);

  // Load RegExp data.
  __ bind(&success);
  __ movq(rax, Operand(rsp, kJSRegExpOffset));
  __ movq(rcx, FieldOperand(rax, JSRegExp::kDataOffset));
  __ SmiToInteger32(rax,
                    FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2.
  __ leal(rdx, Operand(rax, rax, times_1, 2));

  // rdx: Number of capture registers
  // Load last_match_info which is still known to be a fast case JSArray.
  __ movq(rax, Operand(rsp, kLastMatchInfoOffset));
  __ movq(rbx, FieldOperand(rax, JSArray::kElementsOffset));

  // rbx: last_match_info backing store (FixedArray)
  // rdx: number of capture registers
  // Store the capture count.
  __ Integer32ToSmi(kScratchRegister, rdx);
  __ movq(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset),
          kScratchRegister);
  // Store last subject and last input.
  __ movq(rax, Operand(rsp, kSubjectOffset));
  __ movq(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax);
  __ movq(rcx, rbx);
  __ RecordWrite(rcx, RegExpImpl::kLastSubjectOffset, rax, rdi);
  __ movq(rax, Operand(rsp, kSubjectOffset));
  __ movq(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax);
  __ movq(rcx, rbx);
  __ RecordWrite(rcx, RegExpImpl::kLastInputOffset, rax, rdi);

  // Get the static offsets vector filled by the native regexp code.
  __ movq(rcx, ExternalReference::address_of_static_offsets_vector());

  // rbx: last_match_info backing store (FixedArray)
  // rcx: offsets vector
  // rdx: number of capture registers
  NearLabel next_capture, done;
  // Capture register counter starts from number of capture registers and
  // counts down until wrapping after zero.
  __ bind(&next_capture);
  __ subq(rdx, Immediate(1));
  __ j(negative, &done);
  // Read the value from the static offsets vector buffer and make it a smi.
  __ movl(rdi, Operand(rcx, rdx, times_int_size, 0));
  __ Integer32ToSmi(rdi, rdi);
  // Store the smi value in the last match info.
  __ movq(FieldOperand(rbx,
                       rdx,
                       times_pointer_size,
                       RegExpImpl::kFirstCaptureOffset),
          rdi);
  __ jmp(&next_capture);
  __ bind(&done);

  // Return last match info.
  __ movq(rax, Operand(rsp, kLastMatchInfoOffset));
  __ ret(4 * kPointerSize);

  // Do the runtime call to execute the regexp.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
#endif  // V8_INTERPRETED_REGEXP
}


void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
  const int kMaxInlineLength = 100;
  Label slowcase;
  Label done;
  __ movq(r8, Operand(rsp, kPointerSize * 3));
  __ JumpIfNotSmi(r8, &slowcase);
  __ SmiToInteger32(rbx, r8);
  __ cmpl(rbx, Immediate(kMaxInlineLength));
  __ j(above, &slowcase);
  // Smi-tagging is equivalent to multiplying by 2.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  // Allocate RegExpResult followed by FixedArray with size in rbx.
  // JSArray:   [Map][empty properties][Elements][Length-smi][index][input]
  // Elements:  [Map][Length][..elements..]
  __ AllocateInNewSpace(JSRegExpResult::kSize + FixedArray::kHeaderSize,
                        times_pointer_size,
                        rbx,  // In: Number of elements.
                        rax,  // Out: Start of allocation (tagged).
                        rcx,  // Out: End of allocation.
                        rdx,  // Scratch register
                        &slowcase,
                        TAG_OBJECT);
  // rax: Start of allocated area, object-tagged.
  // rbx: Number of array elements as int32.
  // r8: Number of array elements as smi.

  // Set JSArray map to global.regexp_result_map().
  __ movq(rdx, ContextOperand(rsi, Context::GLOBAL_INDEX));
  __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalContextOffset));
  __ movq(rdx, ContextOperand(rdx, Context::REGEXP_RESULT_MAP_INDEX));
  __ movq(FieldOperand(rax, HeapObject::kMapOffset), rdx);

  // Set empty properties FixedArray.
  __ Move(FieldOperand(rax, JSObject::kPropertiesOffset),
          Factory::empty_fixed_array());

  // Set elements to point to FixedArray allocated right after the JSArray.
  __ lea(rcx, Operand(rax, JSRegExpResult::kSize));
  __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx);

  // Set input, index and length fields from arguments.
  __ movq(r8, Operand(rsp, kPointerSize * 1));
  __ movq(FieldOperand(rax, JSRegExpResult::kInputOffset), r8);
  __ movq(r8, Operand(rsp, kPointerSize * 2));
  __ movq(FieldOperand(rax, JSRegExpResult::kIndexOffset), r8);
  __ movq(r8, Operand(rsp, kPointerSize * 3));
  __ movq(FieldOperand(rax, JSArray::kLengthOffset), r8);

  // Fill out the elements FixedArray.
  // rax: JSArray.
  // rcx: FixedArray.
  // rbx: Number of elements in array as int32.

  // Set map.
  __ Move(FieldOperand(rcx, HeapObject::kMapOffset),
          Factory::fixed_array_map());
  // Set length.
  __ Integer32ToSmi(rdx, rbx);
  __ movq(FieldOperand(rcx, FixedArray::kLengthOffset), rdx);
  // Fill contents of fixed-array with the-hole.
  __ Move(rdx, Factory::the_hole_value());
  __ lea(rcx, FieldOperand(rcx, FixedArray::kHeaderSize));
  // Fill fixed array elements with hole.
  // rax: JSArray.
  // rbx: Number of elements in array that remain to be filled, as int32.
  // rcx: Start of elements in FixedArray.
  // rdx: the hole.
  Label loop;
  __ testl(rbx, rbx);
  __ bind(&loop);
  __ j(less_equal, &done);  // Jump if rbx is negative or zero.
  __ subl(rbx, Immediate(1));
  __ movq(Operand(rcx, rbx, times_pointer_size, 0), rdx);
  __ jmp(&loop);

  __ bind(&done);
  __ ret(3 * kPointerSize);

  __ bind(&slowcase);
  __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
}


void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
                                                         Register object,
                                                         Register result,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         bool object_is_smi,
                                                         Label* not_found) {
  // Use of registers. Register result is used as a temporary.
  Register number_string_cache = result;
  Register mask = scratch1;
  Register scratch = scratch2;

  // Load the number string cache.
  __ LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);

  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  __ SmiToInteger32(
      mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
  __ shrl(mask, Immediate(1));
  __ subq(mask, Immediate(1));  // Make mask.
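  // For example, a number string cache backed by a FixedArray of length 256
  // holds 128 number/string pairs, giving the mask 256 / 2 - 1 = 127.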

  // Calculate the entry in the number string cache. The hash value in the
  // number string cache for smis is just the smi value, and the hash for
  // doubles is the xor of the upper and lower words. See
  // Heap::GetNumberStringCache.
  Label is_smi;
  Label load_result_from_cache;
  if (!object_is_smi) {
    __ JumpIfSmi(object, &is_smi);
    __ CheckMap(object, Factory::heap_number_map(), not_found, true);

    STATIC_ASSERT(8 == kDoubleSize);
    __ movl(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
    __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset));
    GenerateConvertHashCodeToIndex(masm, scratch, mask);

    Register index = scratch;
    Register probe = mask;
    __ movq(probe,
            FieldOperand(number_string_cache,
                         index,
                         times_1,
                         FixedArray::kHeaderSize));
    __ JumpIfSmi(probe, not_found);
    ASSERT(CpuFeatures::IsSupported(SSE2));
    CpuFeatures::Scope fscope(SSE2);
    __ movsd(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
    __ movsd(xmm1, FieldOperand(probe, HeapNumber::kValueOffset));
    __ ucomisd(xmm0, xmm1);
    __ j(parity_even, not_found);  // Bail out if NaN is involved.
    __ j(not_equal, not_found);  // The cache did not contain this value.
    __ jmp(&load_result_from_cache);
  }

  __ bind(&is_smi);
  __ SmiToInteger32(scratch, object);
  GenerateConvertHashCodeToIndex(masm, scratch, mask);

  Register index = scratch;
  // Check if the entry is the smi we are looking for.
  __ cmpq(object,
          FieldOperand(number_string_cache,
                       index,
                       times_1,
                       FixedArray::kHeaderSize));
  __ j(not_equal, not_found);

  // Get the result from the cache.
  __ bind(&load_result_from_cache);
  __ movq(result,
          FieldOperand(number_string_cache,
                       index,
                       times_1,
                       FixedArray::kHeaderSize + kPointerSize));
  __ IncrementCounter(&Counters::number_to_string_native, 1);
}


void NumberToStringStub::GenerateConvertHashCodeToIndex(MacroAssembler* masm,
                                                        Register hash,
                                                        Register mask) {
  __ and_(hash, mask);
  // Each entry in the string cache consists of two pointer-sized fields,
  // but the times_twice_pointer_size (multiplication by 16) scale factor
  // is not supported by addrmode on the x64 platform.
  // So we have to premultiply the entry index before lookup.
  __ shl(hash, Immediate(kPointerSizeLog2 + 1));
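  // Worked example (illustrative): with the mask 127 and a hash of 42, the
  // entry index is 42, and the shift by kPointerSizeLog2 + 1 = 4 turns it
  // into the byte offset 42 * 16 = 672 of the number/string pair.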
}


void NumberToStringStub::Generate(MacroAssembler* masm) {
  Label runtime;

  __ movq(rbx, Operand(rsp, kPointerSize));

  // Generate code to lookup number in the number string cache.
  GenerateLookupNumberStringCache(masm, rbx, rax, r8, r9, false, &runtime);
  __ ret(1 * kPointerSize);

  __ bind(&runtime);
  // Handle number to string in the runtime system if not found in the cache.
  __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);
}


static int NegativeComparisonResult(Condition cc) {
  ASSERT(cc != equal);
  ASSERT((cc == less) || (cc == less_equal)
      || (cc == greater) || (cc == greater_equal));
  return (cc == greater || cc == greater_equal) ? LESS : GREATER;
}


void CompareStub::Generate(MacroAssembler* masm) {
  ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));

  Label check_unequal_objects, done;

  // Compare two smis if required.
  if (include_smi_compare_) {
    Label non_smi, smi_done;
    __ JumpIfNotBothSmi(rax, rdx, &non_smi);
    __ subq(rdx, rax);
    __ j(no_overflow, &smi_done);
    __ not_(rdx);  // Correct sign in case of overflow. rdx cannot be 0 here.
    __ bind(&smi_done);
    __ movq(rax, rdx);
    __ ret(0);
    __ bind(&non_smi);
  } else if (FLAG_debug_code) {
    Label ok;
    __ JumpIfNotSmi(rdx, &ok);
    __ JumpIfNotSmi(rax, &ok);
    __ Abort("CompareStub: smi operands");
    __ bind(&ok);
  }
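
  // A note on the smi fast path above (when include_smi_compare_ is set):
  // the result is rdx - rax, whose sign orders the two smis. If that
  // subtraction overflows, the sign bit is exactly inverted; not_(rdx)
  // flips the sign back while keeping the value non-zero (the low 32 bits
  // of a smi difference are zero, so the complement cannot be zero), and
  // the returned integer still compares correctly against zero.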
2768
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002769 // The compare stub returns a positive, negative, or zero 64-bit integer
2770 // value in rax, corresponding to result of comparing the two inputs.
2771 // NOTICE! This code is only reached after a smi-fast-case check, so
2772 // it is certain that at least one operand isn't a smi.
2773
2774 // Two identical objects are equal unless they are both NaN or undefined.
2775 {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002776 NearLabel not_identical;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002777 __ cmpq(rax, rdx);
2778 __ j(not_equal, &not_identical);
2779
2780 if (cc_ != equal) {
2781 // Check for undefined. undefined OP undefined is false even though
2782 // undefined == undefined.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002783 NearLabel check_for_nan;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002784 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
2785 __ j(not_equal, &check_for_nan);
2786 __ Set(rax, NegativeComparisonResult(cc_));
2787 __ ret(0);
2788 __ bind(&check_for_nan);
2789 }
2790
2791 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
2792 // so we do the second best thing - test it ourselves.
2793 // Note: if cc_ != equal, never_nan_nan_ is not used.
2794 // We cannot set rax to EQUAL until just before return because
2795 // rax must be unchanged on jump to not_identical.
2796
2797 if (never_nan_nan_ && (cc_ == equal)) {
2798 __ Set(rax, EQUAL);
2799 __ ret(0);
2800 } else {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002801 NearLabel heap_number;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002802 // If it's not a heap number, then return equal for (in)equality operator.
2803 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2804 Factory::heap_number_map());
2805 __ j(equal, &heap_number);
2806 if (cc_ != equal) {
2807 // Call runtime on identical JSObjects. Otherwise return equal.
2808 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
2809 __ j(above_equal, &not_identical);
2810 }
2811 __ Set(rax, EQUAL);
2812 __ ret(0);
2813
2814 __ bind(&heap_number);
2815 // It is a heap number, so return equal if it's not NaN.
2816 // For NaN, return 1 for every condition except greater and
2817 // greater-equal. Return -1 for them, so the comparison yields
2818 // false for all conditions except not-equal.
2819 __ Set(rax, EQUAL);
2820 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
2821 __ ucomisd(xmm0, xmm0);
2822 __ setcc(parity_even, rax);
2823 // rax is 0 for equal non-NaN heapnumbers, 1 for NaNs.
2824 if (cc_ == greater_equal || cc_ == greater) {
2825 __ neg(rax);
2826 }
2827 __ ret(0);
2828 }
2829
2830 __ bind(&not_identical);
2831 }
2832
2833 if (cc_ == equal) { // Both strict and non-strict.
2834 Label slow; // Fallthrough label.
2835
2836 // If we're doing a strict equality comparison, we don't have to do
2837 // type conversion, so we generate code to do fast comparison for objects
2838 // and oddballs. Non-smi numbers and strings still go through the usual
2839 // slow-case code.
2840 if (strict_) {
2841 // If either is a Smi (we know that not both are), then they can only
2842 // be equal if the other is a HeapNumber. If so, use the slow case.
2843 {
2844 Label not_smis;
2845 __ SelectNonSmi(rbx, rax, rdx, &not_smis);
2846
2847 // Check if the non-smi operand is a heap number.
2848 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
2849 Factory::heap_number_map());
2850 // If heap number, handle it in the slow case.
2851 __ j(equal, &slow);
2852 // Return non-equal. ebx (the lower half of rbx) is not zero.
2853 __ movq(rax, rbx);
2854 __ ret(0);
2855
2856 __ bind(&not_smis);
2857 }
2858
2859 // If either operand is a JSObject or an oddball value, then they are not
2860 // equal since their pointers are different
2861 // There is no test for undetectability in strict equality.
2862
2863 // If the first object is a JS object, we have done pointer comparison.
2864 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002865 NearLabel first_non_object;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002866 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
2867 __ j(below, &first_non_object);
2868 // Return non-zero (eax (not rax) is not zero)
2869 Label return_not_equal;
2870 STATIC_ASSERT(kHeapObjectTag != 0);
2871 __ bind(&return_not_equal);
2872 __ ret(0);
2873
2874 __ bind(&first_non_object);
2875 // Check for oddballs: true, false, null, undefined.
2876 __ CmpInstanceType(rcx, ODDBALL_TYPE);
2877 __ j(equal, &return_not_equal);
2878
2879 __ CmpObjectType(rdx, FIRST_JS_OBJECT_TYPE, rcx);
2880 __ j(above_equal, &return_not_equal);
2881
2882 // Check for oddballs: true, false, null, undefined.
2883 __ CmpInstanceType(rcx, ODDBALL_TYPE);
2884 __ j(equal, &return_not_equal);
2885
2886 // Fall through to the general case.
2887 }
2888 __ bind(&slow);
2889 }
2890
2891 // Generate the number comparison code.
2892 if (include_number_compare_) {
2893 Label non_number_comparison;
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002894 NearLabel unordered;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002895 FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison);
2896 __ xorl(rax, rax);
2897 __ xorl(rcx, rcx);
2898 __ ucomisd(xmm0, xmm1);
2899
2900 // Don't base result on EFLAGS when a NaN is involved.
2901 __ j(parity_even, &unordered);
2902 // Return a result of -1, 0, or 1, based on EFLAGS.
2903 __ setcc(above, rax);
2904 __ setcc(below, rcx);
2905 __ subq(rax, rcx);
2906 __ ret(0);
2907
2908 // If one of the numbers was NaN, then the result is always false.
2909 // The cc is never not-equal.
2910 __ bind(&unordered);
2911 ASSERT(cc_ != not_equal);
2912 if (cc_ == less || cc_ == less_equal) {
2913 __ Set(rax, 1);
2914 } else {
2915 __ Set(rax, -1);
2916 }
2917 __ ret(0);
2918
2919 // The number comparison code did not provide a valid result.
2920 __ bind(&non_number_comparison);
2921 }
2922
2923 // Fast negative check for symbol-to-symbol equality.
2924 Label check_for_strings;
2925 if (cc_ == equal) {
2926 BranchIfNonSymbol(masm, &check_for_strings, rax, kScratchRegister);
2927 BranchIfNonSymbol(masm, &check_for_strings, rdx, kScratchRegister);
2928
2929 // We've already checked for object identity, so if both operands
2930 // are symbols they aren't equal. Register eax (not rax) already holds a
2931 // non-zero value, which indicates not equal, so just return.
2932 __ ret(0);
2933 }
2934
2935 __ bind(&check_for_strings);
2936
2937 __ JumpIfNotBothSequentialAsciiStrings(
2938 rdx, rax, rcx, rbx, &check_unequal_objects);
2939
2940 // Inline comparison of ascii strings.
2941 StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
2942 rdx,
2943 rax,
2944 rcx,
2945 rbx,
2946 rdi,
2947 r8);
2948
2949#ifdef DEBUG
2950 __ Abort("Unexpected fall-through from string comparison");
2951#endif
2952
2953 __ bind(&check_unequal_objects);
2954 if (cc_ == equal && !strict_) {
2955 // Not strict equality. Objects are unequal if
2956 // they are both JSObjects and not undetectable,
2957 // and their pointers are different.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002958 NearLabel not_both_objects, return_unequal;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002959 // At most one is a smi, so we can test for smi by adding the two.
2960 // A smi plus a heap object has the low bit set, a heap object plus
2961 // a heap object has the low bit clear.
2962 STATIC_ASSERT(kSmiTag == 0);
2963 STATIC_ASSERT(kSmiTagMask == 1);
2964 __ lea(rcx, Operand(rax, rdx, times_1, 0));
2965 __ testb(rcx, Immediate(kSmiTagMask));
2966 __ j(not_zero, &not_both_objects);
2967 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rbx);
2968 __ j(below, &not_both_objects);
2969 __ CmpObjectType(rdx, FIRST_JS_OBJECT_TYPE, rcx);
2970 __ j(below, &not_both_objects);
2971 __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2972 Immediate(1 << Map::kIsUndetectable));
2973 __ j(zero, &return_unequal);
2974 __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
2975 Immediate(1 << Map::kIsUndetectable));
2976 __ j(zero, &return_unequal);
2977 // The objects are both undetectable, so they both compare as the value
2978 // undefined, and are equal.
2979 __ Set(rax, EQUAL);
2980 __ bind(&return_unequal);
Steve Block1e0659c2011-05-24 12:43:12 +01002981 // Return non-equal by returning the non-zero object pointer in rax,
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002982 // or return equal if we fell through to here.
2983 __ ret(0);
2984 __ bind(&not_both_objects);
2985 }
2986
2987 // Push arguments below the return address to prepare jump to builtin.
2988 __ pop(rcx);
2989 __ push(rdx);
2990 __ push(rax);
2991
2992 // Figure out which native to call and setup the arguments.
2993 Builtins::JavaScript builtin;
2994 if (cc_ == equal) {
2995 builtin = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
2996 } else {
2997 builtin = Builtins::COMPARE;
2998 __ Push(Smi::FromInt(NegativeComparisonResult(cc_)));
2999 }
3000
3001 // Restore return address on the stack.
3002 __ push(rcx);
3003
3004 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
3005 // tagged as a small integer.
3006 __ InvokeBuiltin(builtin, JUMP_FUNCTION);
3007}
3008
3009
3010void CompareStub::BranchIfNonSymbol(MacroAssembler* masm,
3011 Label* label,
3012 Register object,
3013 Register scratch) {
3014 __ JumpIfSmi(object, label);
3015 __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset));
3016 __ movzxbq(scratch,
3017 FieldOperand(scratch, Map::kInstanceTypeOffset));
3018 // Ensure that no non-strings have the symbol bit set.
3019 STATIC_ASSERT(LAST_TYPE < kNotStringTag + kIsSymbolMask);
3020 STATIC_ASSERT(kSymbolTag != 0);
3021 __ testb(scratch, Immediate(kIsSymbolMask));
3022 __ j(zero, label);
3023}
3024
3025
3026void StackCheckStub::Generate(MacroAssembler* masm) {
Ben Murdochf87a2032010-10-22 12:50:53 +01003027 __ TailCallRuntime(Runtime::kStackGuard, 0, 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003028}
3029
3030
3031void CallFunctionStub::Generate(MacroAssembler* masm) {
3032 Label slow;
3033
3034 // If the receiver might be a value (string, number or boolean) check for this
3035 // and box it if it is.
3036 if (ReceiverMightBeValue()) {
3037 // Get the receiver from the stack.
3038 // +1 ~ return address
3039 Label receiver_is_value, receiver_is_js_object;
3040 __ movq(rax, Operand(rsp, (argc_ + 1) * kPointerSize));
3041
3042 // Check if receiver is a smi (which is a number value).
3043 __ JumpIfSmi(rax, &receiver_is_value);
3044
3045 // Check if the receiver is a valid JS object.
3046 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rdi);
3047 __ j(above_equal, &receiver_is_js_object);
3048
3049 // Call the runtime to box the value.
3050 __ bind(&receiver_is_value);
3051 __ EnterInternalFrame();
3052 __ push(rax);
3053 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
3054 __ LeaveInternalFrame();
3055 __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rax);
3056
3057 __ bind(&receiver_is_js_object);
3058 }
3059
3060 // Get the function to call from the stack.
3061 // +2 ~ receiver, return address
3062 __ movq(rdi, Operand(rsp, (argc_ + 2) * kPointerSize));
3063
3064 // Check that the function really is a JavaScript function.
3065 __ JumpIfSmi(rdi, &slow);
3066 // Go to the slow case if we do not have a function.
3067 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
3068 __ j(not_equal, &slow);
3069
3070 // Fast-case: Just invoke the function.
3071 ParameterCount actual(argc_);
3072 __ InvokeFunction(rdi, actual, JUMP_FUNCTION);
3073
3074 // Slow-case: Non-function called.
3075 __ bind(&slow);
3076 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
3077 // of the original receiver from the call site).
3078 __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rdi);
3079 __ Set(rax, argc_);
3080 __ Set(rbx, 0);
3081 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
3082 Handle<Code> adaptor(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
3083 __ Jump(adaptor, RelocInfo::CODE_TARGET);
3084}
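
// Roughly, the stub above implements the following call sequence (a sketch;
// 'receiver' and 'callee' stand for the stack slots read above):
//
//   if (ReceiverMightBeValue() &&
//       (receiver.IsSmi() || !receiver.IsJSObject())) {
//     receiver = TO_OBJECT(receiver);   // box string/number/boolean
//   }
//   if (callee.IsJSFunction()) return callee.Invoke(args);
//   return CALL_NON_FUNCTION(args);     // callee passed as the receiver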
3085
3086
3087void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
3088 // Check that the stack contains the next handler, the frame pointer,
3089 // the state and the return address, in that order.
3090 STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
3091 StackHandlerConstants::kStateOffset);
3092 STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
3093 StackHandlerConstants::kPCOffset);
3094
3095 ExternalReference handler_address(Top::k_handler_address);
3096 __ movq(kScratchRegister, handler_address);
3097 __ movq(rsp, Operand(kScratchRegister, 0));
3098 // get next in chain
3099 __ pop(rcx);
3100 __ movq(Operand(kScratchRegister, 0), rcx);
3101 __ pop(rbp); // pop frame pointer
3102 __ pop(rdx); // remove state
3103
3104 // Before returning we restore the context from the frame pointer if not NULL.
3105 // The frame pointer is NULL in the exception handler of a JS entry frame.
3106 __ Set(rsi, 0); // Tentatively set context pointer to NULL
3107 NearLabel skip;
3108 __ cmpq(rbp, Immediate(0));
3109 __ j(equal, &skip);
3110 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3111 __ bind(&skip);
3112 __ ret(0);
3113}
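
// The pops above walk a stack handler with this layout, expressed as a C++
// sketch (the struct name is illustrative; the field order is fixed by the
// STATIC_ASSERTs on StackHandlerConstants in this file):
//
//   struct StackHandler {  // lowest address first
//     StackHandler* next;  // kNextOffset: relinked into handler_address
//     void* fp;            // kFPOffset:   popped into rbp
//     intptr_t state;      // kStateOffset: popped into rdx
//     void* pc;            // kPCOffset:   consumed by the final ret
//   };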
3114
3115
3116void CEntryStub::GenerateCore(MacroAssembler* masm,
3117 Label* throw_normal_exception,
3118 Label* throw_termination_exception,
3119 Label* throw_out_of_memory_exception,
3120 bool do_gc,
3121                              bool always_allocate_scope) {
3122 // rax: result parameter for PerformGC, if any.
3123 // rbx: pointer to C function (C callee-saved).
3124 // rbp: frame pointer (restored after C call).
3125 // rsp: stack pointer (restored after C call).
3126 // r14: number of arguments including receiver (C callee-saved).
3127 // r12: pointer to the first argument (C callee-saved).
3128 // This pointer is reused in LeaveExitFrame(), so it is stored in a
3129 // callee-saved register.
3130
3131 // Simple results returned in rax (both AMD64 and Win64 calling conventions).
3132 // Complex results must be written to address passed as first argument.
3133 // AMD64 calling convention: a struct of two pointers in rax+rdx
3134
3135 // Check stack alignment.
3136 if (FLAG_debug_code) {
3137 __ CheckStackAlignment();
3138 }
3139
3140 if (do_gc) {
3141 // Pass failure code returned from last attempt as first argument to
3142 // PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the
3143 // stack is known to be aligned. This function takes one argument which is
3144 // passed in register.
3145#ifdef _WIN64
3146 __ movq(rcx, rax);
3147#else // _WIN64
3148 __ movq(rdi, rax);
3149#endif
3150 __ movq(kScratchRegister,
3151 FUNCTION_ADDR(Runtime::PerformGC),
3152 RelocInfo::RUNTIME_ENTRY);
3153 __ call(kScratchRegister);
3154 }
3155
3156 ExternalReference scope_depth =
3157 ExternalReference::heap_always_allocate_scope_depth();
3158 if (always_allocate_scope) {
3159 __ movq(kScratchRegister, scope_depth);
3160 __ incl(Operand(kScratchRegister, 0));
3161 }
3162
3163 // Call C function.
3164#ifdef _WIN64
3165 // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9
3166 // Store Arguments object on stack, below the 4 WIN64 ABI parameter slots.
3167 __ movq(StackSpaceOperand(0), r14); // argc.
3168 __ movq(StackSpaceOperand(1), r12); // argv.
3169 if (result_size_ < 2) {
3170 // Pass a pointer to the Arguments object as the first argument.
3171 // Return result in single register (rax).
3172 __ lea(rcx, StackSpaceOperand(0));
3173 } else {
3174 ASSERT_EQ(2, result_size_);
3175 // Pass a pointer to the result location as the first argument.
3176 __ lea(rcx, StackSpaceOperand(2));
3177 // Pass a pointer to the Arguments object as the second argument.
3178 __ lea(rdx, StackSpaceOperand(0));
3179 }
3180
3181#else // _WIN64
3182 // GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9.
3183 __ movq(rdi, r14); // argc.
3184 __ movq(rsi, r12); // argv.
3185#endif
3186 __ call(rbx);
3187 // Result is in rax - do not destroy this register!
3188
3189 if (always_allocate_scope) {
3190 __ movq(kScratchRegister, scope_depth);
3191 __ decl(Operand(kScratchRegister, 0));
3192 }
3193
3194 // Check for failure result.
3195 Label failure_returned;
3196 STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
3197#ifdef _WIN64
3198 // If return value is on the stack, pop it to registers.
3199 if (result_size_ > 1) {
3200 ASSERT_EQ(2, result_size_);
3201 // Read result values stored on stack. Result is stored
3202 // above the four argument mirror slots and the two
3203 // Arguments object slots.
3204 __ movq(rax, Operand(rsp, 6 * kPointerSize));
3205 __ movq(rdx, Operand(rsp, 7 * kPointerSize));
3206 }
3207#endif
3208 __ lea(rcx, Operand(rax, 1));
3209 // Lower 2 bits of rcx are 0 iff rax has failure tag.
3210 __ testl(rcx, Immediate(kFailureTagMask));
3211 __ j(zero, &failure_returned);
3212
3213 // Exit the JavaScript to C++ exit frame.
3214 __ LeaveExitFrame(save_doubles_);
3215 __ ret(0);
3216
3217 // Handling of failure.
3218 __ bind(&failure_returned);
3219
3220 NearLabel retry;
3221 // If the returned exception is RETRY_AFTER_GC, continue at the retry label.
3222 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
3223 __ testl(rax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
3224 __ j(zero, &retry);
3225
3226 // Special handling of out of memory exceptions.
3227 __ movq(kScratchRegister, Failure::OutOfMemoryException(), RelocInfo::NONE);
3228 __ cmpq(rax, kScratchRegister);
3229 __ j(equal, throw_out_of_memory_exception);
3230
3231 // Retrieve the pending exception and clear the variable.
3232 ExternalReference pending_exception_address(Top::k_pending_exception_address);
3233 __ movq(kScratchRegister, pending_exception_address);
3234 __ movq(rax, Operand(kScratchRegister, 0));
3235 __ movq(rdx, ExternalReference::the_hole_value_location());
3236 __ movq(rdx, Operand(rdx, 0));
3237 __ movq(Operand(kScratchRegister, 0), rdx);
3238
3239 // Special handling of termination exceptions which are uncatchable
3240 // by javascript code.
3241 __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
3242 __ j(equal, throw_termination_exception);
3243
3244 // Handle normal exception.
3245 __ jmp(throw_normal_exception);
3246
3247 // Retry.
3248 __ bind(&retry);
3249}
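
// The lea/test pair above checks the failure tag without clobbering rax.
// A failure object has all low tag bits set (see the STATIC_ASSERT on
// kFailureTag above), so adding one clears exactly those bits.
// Equivalent C++ (a minimal sketch with an illustrative name):
//
//   static bool LooksLikeFailure(intptr_t value) {
//     return ((value + 1) & kFailureTagMask) == 0;
//   }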
3250
3251
3252void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
3253 UncatchableExceptionType type) {
3254 // Fetch top stack handler.
3255 ExternalReference handler_address(Top::k_handler_address);
3256 __ movq(kScratchRegister, handler_address);
3257 __ movq(rsp, Operand(kScratchRegister, 0));
3258
3259 // Unwind the handlers until the ENTRY handler is found.
3260 NearLabel loop, done;
3261 __ bind(&loop);
3262 // Load the type of the current stack handler.
3263 const int kStateOffset = StackHandlerConstants::kStateOffset;
3264 __ cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY));
3265 __ j(equal, &done);
3266 // Fetch the next handler in the list.
3267 const int kNextOffset = StackHandlerConstants::kNextOffset;
3268 __ movq(rsp, Operand(rsp, kNextOffset));
3269 __ jmp(&loop);
3270 __ bind(&done);
3271
3272 // Set the top handler address to next handler past the current ENTRY handler.
3273 __ movq(kScratchRegister, handler_address);
3274 __ pop(Operand(kScratchRegister, 0));
3275
3276 if (type == OUT_OF_MEMORY) {
3277 // Set external caught exception to false.
3278 ExternalReference external_caught(Top::k_external_caught_exception_address);
3279 __ movq(rax, Immediate(false));
3280 __ store_rax(external_caught);
3281
3282 // Set pending exception and rax to out of memory exception.
3283 ExternalReference pending_exception(Top::k_pending_exception_address);
3284 __ movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE);
3285 __ store_rax(pending_exception);
3286 }
3287
3288 // Clear the context pointer.
3289 __ Set(rsi, 0);
3290
3291 // Restore registers from handler.
3292 STATIC_ASSERT(StackHandlerConstants::kNextOffset + kPointerSize ==
3293 StackHandlerConstants::kFPOffset);
3294 __ pop(rbp); // FP
3295 STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
3296 StackHandlerConstants::kStateOffset);
3297 __ pop(rdx); // State
3298
3299 STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
3300 StackHandlerConstants::kPCOffset);
3301 __ ret(0);
3302}
3303
3304
3305void CEntryStub::Generate(MacroAssembler* masm) {
3306 // rax: number of arguments including receiver
3307 // rbx: pointer to C function (C callee-saved)
3308 // rbp: frame pointer of calling JS frame (restored after C call)
3309 // rsp: stack pointer (restored after C call)
3310 // rsi: current context (restored)
3311
3312 // NOTE: Invocations of builtins may return failure objects
3313 // instead of a proper result. The builtin entry handles
3314 // this by performing a garbage collection and retrying the
3315 // builtin once.
3316
3317 // Enter the exit frame that transitions from JavaScript to C++.
3318#ifdef _WIN64
3319 int arg_stack_space = (result_size_ < 2 ? 2 : 4);
3320#else
3321 int arg_stack_space = 0;
3322#endif
3323 __ EnterExitFrame(arg_stack_space, save_doubles_);
3324
3325 // rax: Holds the context at this point, but should not be used.
3326 // On entry to code generated by GenerateCore, it must hold
3327 // a failure result if the collect_garbage argument to GenerateCore
3328 // is true. This failure result can be the result of code
3329 // generated by a previous call to GenerateCore. The value
3330 // of rax is then passed to Runtime::PerformGC.
3331 // rbx: pointer to builtin function (C callee-saved).
3332 // rbp: frame pointer of exit frame (restored after C call).
3333 // rsp: stack pointer (restored after C call).
3334 // r14: number of arguments including receiver (C callee-saved).
3335 // r12: argv pointer (C callee-saved).
3336
3337 Label throw_normal_exception;
3338 Label throw_termination_exception;
3339 Label throw_out_of_memory_exception;
3340
3341 // Call into the runtime system.
3342 GenerateCore(masm,
3343 &throw_normal_exception,
3344 &throw_termination_exception,
3345 &throw_out_of_memory_exception,
3346 false,
3347 false);
3348
3349 // Do space-specific GC and retry runtime call.
3350 GenerateCore(masm,
3351 &throw_normal_exception,
3352 &throw_termination_exception,
3353 &throw_out_of_memory_exception,
3354 true,
3355 false);
3356
3357 // Do full GC and retry runtime call one final time.
3358 Failure* failure = Failure::InternalError();
3359 __ movq(rax, failure, RelocInfo::NONE);
3360 GenerateCore(masm,
3361 &throw_normal_exception,
3362 &throw_termination_exception,
3363 &throw_out_of_memory_exception,
3364 true,
3365 true);
3366
3367 __ bind(&throw_out_of_memory_exception);
3368 GenerateThrowUncatchable(masm, OUT_OF_MEMORY);
3369
3370 __ bind(&throw_termination_exception);
3371 GenerateThrowUncatchable(masm, TERMINATION);
3372
3373 __ bind(&throw_normal_exception);
3374 GenerateThrowTOS(masm);
3375}
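
// The three GenerateCore calls above implement an escalating retry policy,
// roughly (pseudo-C++ sketch; the helper names are illustrative):
//
//   result = RunBuiltin();                    // attempt 1: no GC
//   if (retry_after_gc(result)) {
//     CollectGarbage(failed_space(result));
//     result = RunBuiltin();                  // attempt 2: space-specific GC
//   }
//   if (retry_after_gc(result)) {
//     CollectAllGarbage();                    // attempt 3: full GC, with the
//     result = RunBuiltin();                  // always-allocate scope active
//   }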
3376
3377
3378void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
3379 Label invoke, exit;
3380#ifdef ENABLE_LOGGING_AND_PROFILING
3381 Label not_outermost_js, not_outermost_js_2;
3382#endif
3383
3384 // Set up the frame.
3385 __ push(rbp);
3386 __ movq(rbp, rsp);
3387
3388 // Push the stack frame type marker twice.
3389 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
3390 // Scratch register is neither callee-save, nor an argument register on any
3391 // platform. It's free to use at this point.
3392 // Cannot use smi-register for loading yet.
3393 __ movq(kScratchRegister,
3394 reinterpret_cast<uint64_t>(Smi::FromInt(marker)),
3395 RelocInfo::NONE);
3396 __ push(kScratchRegister); // context slot
3397 __ push(kScratchRegister); // function slot
3398 // Save callee-saved registers (X64/Win64 calling conventions).
3399 __ push(r12);
3400 __ push(r13);
3401 __ push(r14);
3402 __ push(r15);
3403#ifdef _WIN64
3404 __ push(rdi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
3405 __ push(rsi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
3406#endif
3407 __ push(rbx);
3408 // TODO(X64): On Win64, if we ever use XMM6-XMM15, the low 64 bits are
3409 // callee-saved as well.
3410
3411 // Save a copy of the top frame descriptor on the stack.
3412 ExternalReference c_entry_fp(Top::k_c_entry_fp_address);
3413 __ load_rax(c_entry_fp);
3414 __ push(rax);
3415
3416 // Set up the roots and smi constant registers.
3417 // Needs to be done before any further smi loads.
3418 ExternalReference roots_address = ExternalReference::roots_address();
3419 __ movq(kRootRegister, roots_address);
3420 __ InitializeSmiConstantRegister();
3421
3422#ifdef ENABLE_LOGGING_AND_PROFILING
3423 // If this is the outermost JS call, set js_entry_sp value.
3424 ExternalReference js_entry_sp(Top::k_js_entry_sp_address);
3425 __ load_rax(js_entry_sp);
3426 __ testq(rax, rax);
3427 __ j(not_zero, &not_outermost_js);
3428 __ movq(rax, rbp);
3429 __ store_rax(js_entry_sp);
3430 __ bind(&not_outermost_js);
3431#endif
3432
3433 // Call a faked try-block that does the invoke.
3434 __ call(&invoke);
3435
3436 // Caught exception: Store result (exception) in the pending
3437 // exception field in the JSEnv and return a failure sentinel.
3438 ExternalReference pending_exception(Top::k_pending_exception_address);
3439 __ store_rax(pending_exception);
3440 __ movq(rax, Failure::Exception(), RelocInfo::NONE);
3441 __ jmp(&exit);
3442
3443 // Invoke: Link this frame into the handler chain.
3444 __ bind(&invoke);
3445 __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
3446
3447 // Clear any pending exceptions.
3448 __ load_rax(ExternalReference::the_hole_value_location());
3449 __ store_rax(pending_exception);
3450
3451 // Fake a receiver (NULL).
3452 __ push(Immediate(0)); // receiver
3453
3454 // Invoke the function by calling through JS entry trampoline
3455 // builtin and pop the faked function when we return. We load the address
3456 // from an external reference instead of inlining the call target address
3457 // directly in the code, because the builtin stubs may not have been
3458 // generated yet at the time this code is generated.
3459 if (is_construct) {
3460 ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline);
3461 __ load_rax(construct_entry);
3462 } else {
3463 ExternalReference entry(Builtins::JSEntryTrampoline);
3464 __ load_rax(entry);
3465 }
3466 __ lea(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
3467 __ call(kScratchRegister);
3468
3469 // Unlink this frame from the handler chain.
3470 __ movq(kScratchRegister, ExternalReference(Top::k_handler_address));
3471 __ pop(Operand(kScratchRegister, 0));
3472 // Pop next_sp.
3473 __ addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
3474
3475#ifdef ENABLE_LOGGING_AND_PROFILING
3476 // If the current RBP value is the same as the js_entry_sp value, it means
3477 // that the current function is the outermost.
3478 __ movq(kScratchRegister, js_entry_sp);
3479 __ cmpq(rbp, Operand(kScratchRegister, 0));
3480 __ j(not_equal, &not_outermost_js_2);
3481 __ movq(Operand(kScratchRegister, 0), Immediate(0));
3482 __ bind(&not_outermost_js_2);
3483#endif
3484
3485 // Restore the top frame descriptor from the stack.
3486 __ bind(&exit);
3487 __ movq(kScratchRegister, ExternalReference(Top::k_c_entry_fp_address));
3488 __ pop(Operand(kScratchRegister, 0));
3489
3490 // Restore callee-saved registers (X64 conventions).
3491 __ pop(rbx);
3492#ifdef _WIN64
3493 // Callee-saved in the Win64 ABI, arguments/volatile in the AMD64 ABI.
3494 __ pop(rsi);
3495 __ pop(rdi);
3496#endif
3497 __ pop(r15);
3498 __ pop(r14);
3499 __ pop(r13);
3500 __ pop(r12);
3501 __ addq(rsp, Immediate(2 * kPointerSize)); // remove markers
3502
3503 // Restore frame pointer and return.
3504 __ pop(rbp);
3505 __ ret(0);
3506}
3507
3508
3509void InstanceofStub::Generate(MacroAssembler* masm) {
3510 // Implements "value instanceof function" operator.
3511 // Expected input state:
3512 // rsp[0] : return address
3513 // rsp[1] : function pointer
3514 // rsp[2] : value
3515 // Returns a bitwise zero to indicate that the value
3516 // is an instance of the function and anything else to
3517 // indicate that the value is not an instance.
3518
3519 // Get the object - go slow case if it's a smi.
3520 Label slow;
3521 __ movq(rax, Operand(rsp, 2 * kPointerSize));
3522 __ JumpIfSmi(rax, &slow);
3523
3524 // Check that the left hand is a JS object. Leave its map in rax.
3525 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rax);
3526 __ j(below, &slow);
3527 __ CmpInstanceType(rax, LAST_JS_OBJECT_TYPE);
3528 __ j(above, &slow);
3529
3530 // Get the prototype of the function.
3531 __ movq(rdx, Operand(rsp, 1 * kPointerSize));
3532 // rdx is function, rax is map.
3533
3534 // Look up the function and the map in the instanceof cache.
3535 NearLabel miss;
3536 __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
3537 __ j(not_equal, &miss);
3538 __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex);
3539 __ j(not_equal, &miss);
3540 __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
3541 __ ret(2 * kPointerSize);
3542
3543 __ bind(&miss);
3544 __ TryGetFunctionPrototype(rdx, rbx, &slow);
3545
3546 // Check that the function prototype is a JS object.
3547 __ JumpIfSmi(rbx, &slow);
3548 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, kScratchRegister);
3549 __ j(below, &slow);
3550 __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE);
3551 __ j(above, &slow);
3552
3553 // Register mapping:
3554 // rax is object map.
3555 // rdx is function.
3556 // rbx is function prototype.
3557 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
3558 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex);
3559
3560 __ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset));
3561
3562 // Loop through the prototype chain looking for the function prototype.
3563 NearLabel loop, is_instance, is_not_instance;
3564 __ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex);
3565 __ bind(&loop);
3566 __ cmpq(rcx, rbx);
3567 __ j(equal, &is_instance);
3568 __ cmpq(rcx, kScratchRegister);
3569 // The code at is_not_instance assumes that kScratchRegister contains a
3570 // non-zero GCable value (the null object in this case).
3571 __ j(equal, &is_not_instance);
3572 __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
3573 __ movq(rcx, FieldOperand(rcx, Map::kPrototypeOffset));
3574 __ jmp(&loop);
3575
3576 __ bind(&is_instance);
3577 __ xorl(rax, rax);
3578 // Store bitwise zero in the cache. This is a Smi in GC terms.
3579 STATIC_ASSERT(kSmiTag == 0);
3580 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
3581 __ ret(2 * kPointerSize);
3582
3583 __ bind(&is_not_instance);
3584 // We have to store a non-zero value in the cache.
3585 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex);
3586 __ ret(2 * kPointerSize);
3587
3588 // Slow-case: Go through the JavaScript implementation.
3589 __ bind(&slow);
3590 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
3591}
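
// Ignoring the cache, the loop above is the usual prototype-chain walk;
// in pseudo-C++ (a sketch only):
//
//   Object* proto = value->map()->prototype();
//   while (proto != null) {
//     if (proto == function_prototype) return 0;  // bitwise zero: instance
//     proto = proto->map()->prototype();
//   }
//   return non_zero;                              // not an instance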
3592
3593
3594Register InstanceofStub::left() { return rax; }
3595
3596
3597Register InstanceofStub::right() { return rdx; }
3598
3599
3600int CompareStub::MinorKey() {
3601 // Encode the parameters in a unique 16 bit value. To avoid duplicate stubs
3602 // the never-NaN-NaN condition is only taken into account if the condition
3603 // is equal.
3604 ASSERT(static_cast<unsigned>(cc_) < (1 << 12));
3605 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
3606 return ConditionField::encode(static_cast<unsigned>(cc_))
3607 | RegisterField::encode(false) // lhs_ and rhs_ are not used
3608 | StrictField::encode(strict_)
3609 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false)
3610          | IncludeNumberCompareField::encode(include_number_compare_)
3611          | IncludeSmiCompareField::encode(include_smi_compare_);
3612}
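
// Each *Field above is a BitField whose encode() shifts the value into the
// field's bit range, so the ORs pack all parameters into disjoint bits of
// one key. Sketch with illustrative widths (the real widths live in the
// BitField declarations, which are not in this file):
//
//   key = (cond << 5) | (reg << 4) | (strict << 3) | (nan << 2)
//       | (num << 1) | smi;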
3613
3614
3615// Unfortunately you have to run without snapshots to see most of these
3616// names in the profile since most compare stubs end up in the snapshot.
3617const char* CompareStub::GetName() {
3618 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
3619
3620 if (name_ != NULL) return name_;
3621 const int kMaxNameLength = 100;
3622 name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
3623 if (name_ == NULL) return "OOM";
3624
3625 const char* cc_name;
3626 switch (cc_) {
3627 case less: cc_name = "LT"; break;
3628 case greater: cc_name = "GT"; break;
3629 case less_equal: cc_name = "LE"; break;
3630 case greater_equal: cc_name = "GE"; break;
3631 case equal: cc_name = "EQ"; break;
3632 case not_equal: cc_name = "NE"; break;
3633 default: cc_name = "UnknownCondition"; break;
3634 }
3635
3636 const char* strict_name = "";
3637 if (strict_ && (cc_ == equal || cc_ == not_equal)) {
3638 strict_name = "_STRICT";
3639 }
3640
3641 const char* never_nan_nan_name = "";
3642 if (never_nan_nan_ && (cc_ == equal || cc_ == not_equal)) {
3643 never_nan_nan_name = "_NO_NAN";
3644 }
3645
3646 const char* include_number_compare_name = "";
3647 if (!include_number_compare_) {
3648 include_number_compare_name = "_NO_NUMBER";
3649 }
3650
3651 const char* include_smi_compare_name = "";
3652 if (!include_smi_compare_) {
3653 include_smi_compare_name = "_NO_SMI";
3654 }
3655
3656 OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
3657              "CompareStub_%s%s%s%s%s",
3658 cc_name,
3659 strict_name,
3660 never_nan_nan_name,
3661              include_number_compare_name,
3662 include_smi_compare_name);
3663 return name_;
3664}
3665
3666
3667// -------------------------------------------------------------------------
3668// StringCharCodeAtGenerator
3669
3670void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
3671 Label flat_string;
3672 Label ascii_string;
3673 Label got_char_code;
3674
3675 // If the receiver is a smi trigger the non-string case.
3676 __ JumpIfSmi(object_, receiver_not_string_);
3677
3678 // Fetch the instance type of the receiver into result register.
3679 __ movq(result_, FieldOperand(object_, HeapObject::kMapOffset));
3680 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
3681 // If the receiver is not a string trigger the non-string case.
3682 __ testb(result_, Immediate(kIsNotStringMask));
3683 __ j(not_zero, receiver_not_string_);
3684
3685 // If the index is non-smi trigger the non-smi case.
3686 __ JumpIfNotSmi(index_, &index_not_smi_);
3687
3688 // Put smi-tagged index into scratch register.
3689 __ movq(scratch_, index_);
3690 __ bind(&got_smi_index_);
3691
3692 // Check for index out of range.
3693 __ SmiCompare(scratch_, FieldOperand(object_, String::kLengthOffset));
3694 __ j(above_equal, index_out_of_range_);
3695
3696 // We need special handling for non-flat strings.
3697 STATIC_ASSERT(kSeqStringTag == 0);
3698 __ testb(result_, Immediate(kStringRepresentationMask));
3699 __ j(zero, &flat_string);
3700
3701 // Handle non-flat strings.
3702 __ testb(result_, Immediate(kIsConsStringMask));
3703 __ j(zero, &call_runtime_);
3704
3705 // ConsString.
3706 // Check whether the right hand side is the empty string (i.e. if
3707 // this is really a flat string in a cons string). If that is not
3708 // the case we would rather go to the runtime system now to flatten
3709 // the string.
3710 __ CompareRoot(FieldOperand(object_, ConsString::kSecondOffset),
3711 Heap::kEmptyStringRootIndex);
3712 __ j(not_equal, &call_runtime_);
3713 // Get the first of the two strings and load its instance type.
3714 __ movq(object_, FieldOperand(object_, ConsString::kFirstOffset));
3715 __ movq(result_, FieldOperand(object_, HeapObject::kMapOffset));
3716 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
3717 // If the first cons component is also non-flat, then go to runtime.
3718 STATIC_ASSERT(kSeqStringTag == 0);
3719 __ testb(result_, Immediate(kStringRepresentationMask));
3720 __ j(not_zero, &call_runtime_);
3721
3722 // Check for 1-byte or 2-byte string.
3723 __ bind(&flat_string);
3724 STATIC_ASSERT(kAsciiStringTag != 0);
3725 __ testb(result_, Immediate(kStringEncodingMask));
3726 __ j(not_zero, &ascii_string);
3727
3728 // 2-byte string.
3729 // Load the 2-byte character code into the result register.
3730 __ SmiToInteger32(scratch_, scratch_);
3731 __ movzxwl(result_, FieldOperand(object_,
3732 scratch_, times_2,
3733 SeqTwoByteString::kHeaderSize));
3734 __ jmp(&got_char_code);
3735
3736 // ASCII string.
3737 // Load the byte into the result register.
3738 __ bind(&ascii_string);
3739 __ SmiToInteger32(scratch_, scratch_);
3740 __ movzxbl(result_, FieldOperand(object_,
3741 scratch_, times_1,
3742 SeqAsciiString::kHeaderSize));
3743 __ bind(&got_char_code);
3744 __ Integer32ToSmi(result_, result_);
3745 __ bind(&exit_);
3746}
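
// The cons-string handling above relies on the invariant that a cons
// string whose second part is the empty string is equivalent to its first
// part; one level is unwrapped inline, anything deeper or non-empty goes
// to the runtime. Sketch:
//
//   if (IsConsString(str) && Second(str) == empty_string) str = First(str);
//   if (!IsSeqString(str)) goto call_runtime_;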
3747
3748
3749void StringCharCodeAtGenerator::GenerateSlow(
3750 MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
3751 __ Abort("Unexpected fallthrough to CharCodeAt slow case");
3752
3753 // Index is not a smi.
3754 __ bind(&index_not_smi_);
3755 // If index is a heap number, try converting it to an integer.
3756 __ CheckMap(index_, Factory::heap_number_map(), index_not_number_, true);
3757 call_helper.BeforeCall(masm);
3758 __ push(object_);
3759 __ push(index_);
3760 __ push(index_); // Consumed by runtime conversion function.
3761 if (index_flags_ == STRING_INDEX_IS_NUMBER) {
3762 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
3763 } else {
3764 ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
3765 // NumberToSmi discards numbers that are not exact integers.
3766 __ CallRuntime(Runtime::kNumberToSmi, 1);
3767 }
3768 if (!scratch_.is(rax)) {
3769 // Save the conversion result before the pop instructions below
3770 // have a chance to overwrite it.
3771 __ movq(scratch_, rax);
3772 }
3773 __ pop(index_);
3774 __ pop(object_);
3775 // Reload the instance type.
3776 __ movq(result_, FieldOperand(object_, HeapObject::kMapOffset));
3777 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
3778 call_helper.AfterCall(masm);
3779 // If index is still not a smi, it must be out of range.
3780 __ JumpIfNotSmi(scratch_, index_out_of_range_);
3781 // Otherwise, return to the fast path.
3782 __ jmp(&got_smi_index_);
3783
3784 // Call runtime. We get here when the receiver is a string and the
3785 // index is a number, but the code of getting the actual character
3786 // is too complex (e.g., when the string needs to be flattened).
3787 __ bind(&call_runtime_);
3788 call_helper.BeforeCall(masm);
3789 __ push(object_);
3790 __ push(index_);
3791 __ CallRuntime(Runtime::kStringCharCodeAt, 2);
3792 if (!result_.is(rax)) {
3793 __ movq(result_, rax);
3794 }
3795 call_helper.AfterCall(masm);
3796 __ jmp(&exit_);
3797
3798 __ Abort("Unexpected fallthrough from CharCodeAt slow case");
3799}
3800
3801
3802// -------------------------------------------------------------------------
3803// StringCharFromCodeGenerator
3804
3805void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
3806 // Fast case of Heap::LookupSingleCharacterStringFromCode.
3807 __ JumpIfNotSmi(code_, &slow_case_);
3808 __ SmiCompare(code_, Smi::FromInt(String::kMaxAsciiCharCode));
3809 __ j(above, &slow_case_);
3810
3811 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
3812 SmiIndex index = masm->SmiToIndex(kScratchRegister, code_, kPointerSizeLog2);
3813 __ movq(result_, FieldOperand(result_, index.reg, index.scale,
3814 FixedArray::kHeaderSize));
3815 __ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
3816 __ j(equal, &slow_case_);
3817 __ bind(&exit_);
3818}
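
// In C++ terms the fast path above is a cache lookup (a sketch; 'cache'
// stands for the single character string cache root loaded above):
//
//   if (IsSmi(code) && SmiValue(code) <= String::kMaxAsciiCharCode) {
//     Object* result = cache[SmiValue(code)];
//     if (result != undefined) return result;  // cache hit
//   }
//   // Otherwise fall through to slow_case_ (runtime call).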
3819
3820
3821void StringCharFromCodeGenerator::GenerateSlow(
3822 MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
3823 __ Abort("Unexpected fallthrough to CharFromCode slow case");
3824
3825 __ bind(&slow_case_);
3826 call_helper.BeforeCall(masm);
3827 __ push(code_);
3828 __ CallRuntime(Runtime::kCharFromCode, 1);
3829 if (!result_.is(rax)) {
3830 __ movq(result_, rax);
3831 }
3832 call_helper.AfterCall(masm);
3833 __ jmp(&exit_);
3834
3835 __ Abort("Unexpected fallthrough from CharFromCode slow case");
3836}
3837
3838
3839// -------------------------------------------------------------------------
3840// StringCharAtGenerator
3841
3842void StringCharAtGenerator::GenerateFast(MacroAssembler* masm) {
3843 char_code_at_generator_.GenerateFast(masm);
3844 char_from_code_generator_.GenerateFast(masm);
3845}
3846
3847
3848void StringCharAtGenerator::GenerateSlow(
3849 MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
3850 char_code_at_generator_.GenerateSlow(masm, call_helper);
3851 char_from_code_generator_.GenerateSlow(masm, call_helper);
3852}
3853
3854
3855void StringAddStub::Generate(MacroAssembler* masm) {
3856 Label string_add_runtime;
3857
3858 // Load the two arguments.
3859 __ movq(rax, Operand(rsp, 2 * kPointerSize)); // First argument.
3860 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); // Second argument.
3861
3862 // Make sure that both arguments are strings if not known in advance.
3863 if (string_check_) {
3864 Condition is_smi;
3865 is_smi = masm->CheckSmi(rax);
3866 __ j(is_smi, &string_add_runtime);
3867 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, r8);
3868 __ j(above_equal, &string_add_runtime);
3869
3870 // First argument is a string, test second.
3871 is_smi = masm->CheckSmi(rdx);
3872 __ j(is_smi, &string_add_runtime);
3873 __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, r9);
3874 __ j(above_equal, &string_add_runtime);
3875 }
3876
3877 // Both arguments are strings.
3878 // rax: first string
3879 // rdx: second string
3880 // Check if either of the strings are empty. In that case return the other.
3881 NearLabel second_not_zero_length, both_not_zero_length;
3882 __ movq(rcx, FieldOperand(rdx, String::kLengthOffset));
3883 __ SmiTest(rcx);
3884 __ j(not_zero, &second_not_zero_length);
3885 // Second string is empty, result is first string which is already in rax.
3886 __ IncrementCounter(&Counters::string_add_native, 1);
3887 __ ret(2 * kPointerSize);
3888 __ bind(&second_not_zero_length);
3889 __ movq(rbx, FieldOperand(rax, String::kLengthOffset));
3890 __ SmiTest(rbx);
3891 __ j(not_zero, &both_not_zero_length);
3892 // First string is empty, result is second string which is in rdx.
3893 __ movq(rax, rdx);
3894 __ IncrementCounter(&Counters::string_add_native, 1);
3895 __ ret(2 * kPointerSize);
3896
3897 // Both strings are non-empty.
3898 // rax: first string
3899 // rbx: length of first string
3900 // rcx: length of second string
3901 // rdx: second string
3902 // r8: map of first string if string check was performed above
3903 // r9: map of second string if string check was performed above
3904 Label string_add_flat_result, longer_than_two;
3905 __ bind(&both_not_zero_length);
3906
3907 // If arguments were known to be strings, maps are not loaded to r8 and r9
3908 // by the code above.
3909 if (!string_check_) {
3910 __ movq(r8, FieldOperand(rax, HeapObject::kMapOffset));
3911 __ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset));
3912 }
3913 // Get the instance types of the two strings as they will be needed soon.
3914 __ movzxbl(r8, FieldOperand(r8, Map::kInstanceTypeOffset));
3915 __ movzxbl(r9, FieldOperand(r9, Map::kInstanceTypeOffset));
3916
3917 // Look at the length of the result of adding the two strings.
3918 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue / 2);
3919 __ SmiAdd(rbx, rbx, rcx);
3920 // Use the runtime system when adding two one character strings, as it
3921 // contains optimizations for this specific case using the symbol table.
3922 __ SmiCompare(rbx, Smi::FromInt(2));
3923 __ j(not_equal, &longer_than_two);
3924
3925 // Check that both strings are non-external ascii strings.
3926 __ JumpIfBothInstanceTypesAreNotSequentialAscii(r8, r9, rbx, rcx,
3927 &string_add_runtime);
3928
3929 // Get the two characters forming the new string.
3930 __ movzxbq(rbx, FieldOperand(rax, SeqAsciiString::kHeaderSize));
3931 __ movzxbq(rcx, FieldOperand(rdx, SeqAsciiString::kHeaderSize));
3932
3933 // Try to look up the two character string in the symbol table. If it
3934 // is not found, just allocate a new one.
3935 Label make_two_character_string, make_flat_ascii_string;
3936 StringHelper::GenerateTwoCharacterSymbolTableProbe(
3937 masm, rbx, rcx, r14, r11, rdi, r12, &make_two_character_string);
3938 __ IncrementCounter(&Counters::string_add_native, 1);
3939 __ ret(2 * kPointerSize);
3940
3941 __ bind(&make_two_character_string);
3942 __ Set(rbx, 2);
3943 __ jmp(&make_flat_ascii_string);
3944
3945 __ bind(&longer_than_two);
3946 // Check if resulting string will be flat.
3947 __ SmiCompare(rbx, Smi::FromInt(String::kMinNonFlatLength));
3948 __ j(below, &string_add_flat_result);
3949 // Handle exceptionally long strings in the runtime system.
3950 STATIC_ASSERT((String::kMaxLength & 0x80000000) == 0);
3951 __ SmiCompare(rbx, Smi::FromInt(String::kMaxLength));
3952 __ j(above, &string_add_runtime);
3953
3954 // If result is not supposed to be flat, allocate a cons string object. If
3955 // both strings are ascii the result is an ascii cons string.
3956 // rax: first string
3957 // rbx: length of resulting flat string
3958 // rdx: second string
3959 // r8: instance type of first string
3960 // r9: instance type of second string
3961 Label non_ascii, allocated, ascii_data;
3962 __ movl(rcx, r8);
3963 __ and_(rcx, r9);
3964 STATIC_ASSERT(kStringEncodingMask == kAsciiStringTag);
3965 __ testl(rcx, Immediate(kAsciiStringTag));
3966 __ j(zero, &non_ascii);
3967 __ bind(&ascii_data);
3968 // Allocate an ascii cons string.
3969 __ AllocateAsciiConsString(rcx, rdi, no_reg, &string_add_runtime);
3970 __ bind(&allocated);
3971 // Fill the fields of the cons string.
3972 __ movq(FieldOperand(rcx, ConsString::kLengthOffset), rbx);
3973 __ movq(FieldOperand(rcx, ConsString::kHashFieldOffset),
3974 Immediate(String::kEmptyHashField));
3975 __ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax);
3976 __ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx);
3977 __ movq(rax, rcx);
3978 __ IncrementCounter(&Counters::string_add_native, 1);
3979 __ ret(2 * kPointerSize);
3980 __ bind(&non_ascii);
3981 // At least one of the strings is two-byte. Check whether it happens
3982 // to contain only ascii characters.
3983 // rcx: first instance type AND second instance type.
3984 // r8: first instance type.
3985 // r9: second instance type.
3986 __ testb(rcx, Immediate(kAsciiDataHintMask));
3987 __ j(not_zero, &ascii_data);
3988 __ xor_(r8, r9);
3989 STATIC_ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0);
3990 __ andb(r8, Immediate(kAsciiStringTag | kAsciiDataHintTag));
3991 __ cmpb(r8, Immediate(kAsciiStringTag | kAsciiDataHintTag));
3992 __ j(equal, &ascii_data);
3993 // Allocate a two byte cons string.
3994 __ AllocateConsString(rcx, rdi, no_reg, &string_add_runtime);
3995 __ jmp(&allocated);
3996
3997 // Handle creating a flat result. First check that both strings are not
3998 // external strings.
3999 // rax: first string
4000 // rbx: length of resulting flat string as smi
4001 // rdx: second string
4002 // r8: instance type of first string
4003 // r9: instance type of second string
4004 __ bind(&string_add_flat_result);
4005 __ SmiToInteger32(rbx, rbx);
4006 __ movl(rcx, r8);
4007 __ and_(rcx, Immediate(kStringRepresentationMask));
4008 __ cmpl(rcx, Immediate(kExternalStringTag));
4009 __ j(equal, &string_add_runtime);
4010 __ movl(rcx, r9);
4011 __ and_(rcx, Immediate(kStringRepresentationMask));
4012 __ cmpl(rcx, Immediate(kExternalStringTag));
4013 __ j(equal, &string_add_runtime);
4014 // Now check if both strings are ascii strings.
4015 // rax: first string
4016 // rbx: length of resulting flat string
4017 // rdx: second string
4018 // r8: instance type of first string
4019 // r9: instance type of second string
4020 Label non_ascii_string_add_flat_result;
4021 STATIC_ASSERT(kStringEncodingMask == kAsciiStringTag);
4022 __ testl(r8, Immediate(kAsciiStringTag));
4023 __ j(zero, &non_ascii_string_add_flat_result);
4024 __ testl(r9, Immediate(kAsciiStringTag));
4025 __ j(zero, &string_add_runtime);
4026
4027 __ bind(&make_flat_ascii_string);
4028 // Both strings are ascii strings. As they are short they are both flat.
4029 __ AllocateAsciiString(rcx, rbx, rdi, r14, r11, &string_add_runtime);
4030 // rcx: result string
4031 __ movq(rbx, rcx);
4032 // Locate first character of result.
4033 __ addq(rcx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
4034 // Locate first character of first argument
4035 __ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset));
4036 __ addq(rax, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
4037 // rax: first char of first argument
4038 // rbx: result string
4039 // rcx: first character of result
4040 // rdx: second string
4041 // rdi: length of first argument
4042 StringHelper::GenerateCopyCharacters(masm, rcx, rax, rdi, true);
4043 // Locate first character of second argument.
4044 __ SmiToInteger32(rdi, FieldOperand(rdx, String::kLengthOffset));
4045 __ addq(rdx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
4046 // rbx: result string
4047 // rcx: next character of result
4048 // rdx: first char of second argument
4049 // rdi: length of second argument
4050 StringHelper::GenerateCopyCharacters(masm, rcx, rdx, rdi, true);
4051 __ movq(rax, rbx);
4052 __ IncrementCounter(&Counters::string_add_native, 1);
4053 __ ret(2 * kPointerSize);
4054
4055 // Handle creating a flat two byte result.
4056 // rax: first string - known to be two byte
4057 // rbx: length of resulting flat string
4058 // rdx: second string
4059 // r8: instance type of first string
4060 // r9: instance type of second string
4061 __ bind(&non_ascii_string_add_flat_result);
4062 __ and_(r9, Immediate(kAsciiStringTag));
4063 __ j(not_zero, &string_add_runtime);
4064 // Both strings are two byte strings. As they are short they are both
4065 // flat.
4066 __ AllocateTwoByteString(rcx, rbx, rdi, r14, r11, &string_add_runtime);
4067 // rcx: result string
4068 __ movq(rbx, rcx);
4069 // Locate first character of result.
4070 __ addq(rcx, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
4071 // Locate first character of first argument.
4072 __ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset));
4073 __ addq(rax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
4074 // rax: first char of first argument
4075 // rbx: result string
4076 // rcx: first character of result
4077 // rdx: second argument
4078 // rdi: length of first argument
4079 StringHelper::GenerateCopyCharacters(masm, rcx, rax, rdi, false);
4080 // Locate first character of second argument.
4081 __ SmiToInteger32(rdi, FieldOperand(rdx, String::kLengthOffset));
4082 __ addq(rdx, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
4083 // rbx: result string
4084 // rcx: next character of result
4085 // rdx: first char of second argument
4086 // rdi: length of second argument
4087 StringHelper::GenerateCopyCharacters(masm, rcx, rdx, rdi, false);
4088 __ movq(rax, rbx);
4089 __ IncrementCounter(&Counters::string_add_native, 1);
4090 __ ret(2 * kPointerSize);
4091
4092 // Just jump to runtime to add the two strings.
4093 __ bind(&string_add_runtime);
4094 __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
4095}
4096
4097
4098void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
4099 Register dest,
4100 Register src,
4101 Register count,
4102 bool ascii) {
4103 Label loop;
4104 __ bind(&loop);
4105 // This loop just copies one character at a time, as it is only used for very
4106 // short strings.
4107 if (ascii) {
4108 __ movb(kScratchRegister, Operand(src, 0));
4109 __ movb(Operand(dest, 0), kScratchRegister);
4110 __ incq(src);
4111 __ incq(dest);
4112 } else {
4113 __ movzxwl(kScratchRegister, Operand(src, 0));
4114 __ movw(Operand(dest, 0), kScratchRegister);
4115 __ addq(src, Immediate(2));
4116 __ addq(dest, Immediate(2));
4117 }
4118 __ decl(count);
4119 __ j(not_zero, &loop);
4120}
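
// The emitted loop is a do-while over single characters, so callers are
// expected to pass a non-zero count. Equivalent C++ (a sketch):
//
//   int char_size = ascii ? 1 : 2;
//   do {
//     memcpy(dest, src, char_size);
//     dest += char_size;
//     src += char_size;
//   } while (--count != 0);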
4121
4122
4123void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
4124 Register dest,
4125 Register src,
4126 Register count,
4127 bool ascii) {
4128 // Copy characters using rep movs of quadwords, then copy any remaining
4129 // characters one at a time.
4130 // Count is a positive int32; dest and src are character pointers.
4132 ASSERT(dest.is(rdi)); // rep movs destination
4133 ASSERT(src.is(rsi)); // rep movs source
4134 ASSERT(count.is(rcx)); // rep movs count
4135
4136 // Nothing to do for zero characters.
4137 NearLabel done;
4138 __ testl(count, count);
4139 __ j(zero, &done);
4140
4141 // Make count the number of bytes to copy.
4142 if (!ascii) {
4143 STATIC_ASSERT(2 == sizeof(uc16));
4144 __ addl(count, count);
4145 }
4146
4147 // Don't enter the rep movs if there are fewer than 8 bytes to copy.
4148 NearLabel last_bytes;
4149 __ testl(count, Immediate(~7));
4150 __ j(zero, &last_bytes);
4151
4152 // Copy from rsi to rdi using the rep movs instruction.
4153 __ movl(kScratchRegister, count);
4154 __ shr(count, Immediate(3)); // Number of quadwords to copy.
4155 __ repmovsq();
4156
4157 // Find number of bytes left.
4158 __ movl(count, kScratchRegister);
4159 __ and_(count, Immediate(7));
4160
4161 // Check if there are more bytes to copy.
4162 __ bind(&last_bytes);
4163 __ testl(count, count);
4164 __ j(zero, &done);
4165
4166 // Copy remaining characters.
4167 Label loop;
4168 __ bind(&loop);
4169 __ movb(kScratchRegister, Operand(src, 0));
4170 __ movb(Operand(dest, 0), kScratchRegister);
4171 __ incq(src);
4172 __ incq(dest);
4173 __ decl(count);
4174 __ j(not_zero, &loop);
4175
4176 __ bind(&done);
4177}
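
// Equivalent C++ for the sequence above (a sketch; copy_quadwords and
// copy_bytes are illustrative helper names, with the 8-byte chunk copy
// standing in for rep movsq):
//
//   int bytes = ascii ? count : count * sizeof(uc16);
//   copy_quadwords(dest, src, bytes >> 3);              // rep movsq part
//   copy_bytes(dest + (bytes & ~7), src + (bytes & ~7),
//              bytes & 7);                              // tail loop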
4178
4179void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
4180 Register c1,
4181 Register c2,
4182 Register scratch1,
4183 Register scratch2,
4184 Register scratch3,
4185 Register scratch4,
4186 Label* not_found) {
4187 // Register scratch3 is the general scratch register in this function.
4188 Register scratch = scratch3;
4189
4190 // Make sure that both characters are not digits, as such strings have a
4191 // different hash algorithm. Don't try to look for these in the symbol table.
4192 NearLabel not_array_index;
4193 __ leal(scratch, Operand(c1, -'0'));
4194 __ cmpl(scratch, Immediate(static_cast<int>('9' - '0')));
4195 __ j(above, &not_array_index);
4196 __ leal(scratch, Operand(c2, -'0'));
4197 __ cmpl(scratch, Immediate(static_cast<int>('9' - '0')));
4198 __ j(below_equal, not_found);
4199
4200 __ bind(&not_array_index);
4201 // Calculate the two character string hash.
4202 Register hash = scratch1;
4203 GenerateHashInit(masm, hash, c1, scratch);
4204 GenerateHashAddCharacter(masm, hash, c2, scratch);
4205 GenerateHashGetHash(masm, hash, scratch);
4206
4207 // Collect the two characters in a register.
4208 Register chars = c1;
4209 __ shl(c2, Immediate(kBitsPerByte));
4210 __ orl(chars, c2);
4211
4212 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
4213 // hash: hash of two character string.
4214
4215 // Load the symbol table.
4216 Register symbol_table = c2;
4217 __ LoadRoot(symbol_table, Heap::kSymbolTableRootIndex);
4218
4219 // Calculate capacity mask from the symbol table capacity.
4220 Register mask = scratch2;
4221 __ SmiToInteger32(mask,
4222 FieldOperand(symbol_table, SymbolTable::kCapacityOffset));
4223 __ decl(mask);
4224
4225 Register undefined = scratch4;
4226 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
4227
4228 // Registers
4229 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
4230 // hash: hash of two character string (32-bit int)
4231 // symbol_table: symbol table
4232 // mask: capacity mask (32-bit int)
4233 // undefined: undefined value
4234 // scratch: -
4235
4236 // Perform a number of probes in the symbol table.
4237 static const int kProbes = 4;
4238 Label found_in_symbol_table;
4239 Label next_probe[kProbes];
4240 for (int i = 0; i < kProbes; i++) {
4241 // Calculate entry in symbol table.
4242 __ movl(scratch, hash);
4243 if (i > 0) {
4244 __ addl(scratch, Immediate(SymbolTable::GetProbeOffset(i)));
4245 }
4246 __ andl(scratch, mask);
4247
4248 // Load the entry from the symbol table.
4249 Register candidate = scratch; // Scratch register contains candidate.
4250 STATIC_ASSERT(SymbolTable::kEntrySize == 1);
4251 __ movq(candidate,
4252 FieldOperand(symbol_table,
4253 scratch,
4254 times_pointer_size,
4255 SymbolTable::kElementsStartOffset));
4256
4257 // If entry is undefined no string with this hash can be found.
4258 __ cmpq(candidate, undefined);
4259 __ j(equal, not_found);
4260
4261 // If length is not 2 the string is not a candidate.
4262 __ SmiCompare(FieldOperand(candidate, String::kLengthOffset),
4263 Smi::FromInt(2));
4264 __ j(not_equal, &next_probe[i]);
4265
4266 // We use kScratchRegister as a temporary register on the assumption that
4267 // JumpIfInstanceTypeIsNotSequentialAscii does not use it implicitly.
4268 Register temp = kScratchRegister;
4269
4270 // Check that the candidate is a non-external ascii string.
4271 __ movq(temp, FieldOperand(candidate, HeapObject::kMapOffset));
4272 __ movzxbl(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
4273 __ JumpIfInstanceTypeIsNotSequentialAscii(
4274 temp, temp, &next_probe[i]);
4275
4276 // Check if the two characters match.
4277 __ movl(temp, FieldOperand(candidate, SeqAsciiString::kHeaderSize));
4278 __ andl(temp, Immediate(0x0000ffff));
4279 __ cmpl(chars, temp);
4280 __ j(equal, &found_in_symbol_table);
4281 __ bind(&next_probe[i]);
4282 }
4283
4284 // No matching 2 character string found by probing.
4285 __ jmp(not_found);
4286
4287 // Scratch register contains result when we fall through to here.
4288 Register result = scratch;
4289 __ bind(&found_in_symbol_table);
4290 if (!result.is(rax)) {
4291 __ movq(rax, result);
4292 }
4293}
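
// The probe loop above is open addressing over the symbol table, roughly
// (pseudo-C++ sketch; GetProbeOffset supplies the probe step):
//
//   for (int i = 0; i < kProbes; i++) {
//     int entry = (hash + (i > 0 ? GetProbeOffset(i) : 0)) & mask;
//     Object* candidate = table[entry];
//     if (candidate == undefined) return *not_found;   // free slot: miss
//     if (IsSeqAsciiString(candidate) && Length(candidate) == 2 &&
//         FirstTwoChars(candidate) == chars) return candidate;
//   }
//   return *not_found;  // all probes exhausted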
4294
4295
4296void StringHelper::GenerateHashInit(MacroAssembler* masm,
4297 Register hash,
4298 Register character,
4299 Register scratch) {
4300 // hash = character + (character << 10);
4301 __ movl(hash, character);
4302 __ shll(hash, Immediate(10));
4303 __ addl(hash, character);
4304 // hash ^= hash >> 6;
4305 __ movl(scratch, hash);
4306 __ sarl(scratch, Immediate(6));
4307 __ xorl(hash, scratch);
4308}
4309
4310
4311void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
4312 Register hash,
4313 Register character,
4314 Register scratch) {
4315 // hash += character;
4316 __ addl(hash, character);
4317 // hash += hash << 10;
4318 __ movl(scratch, hash);
4319 __ shll(scratch, Immediate(10));
4320 __ addl(hash, scratch);
4321 // hash ^= hash >> 6;
4322 __ movl(scratch, hash);
4323 __ sarl(scratch, Immediate(6));
4324 __ xorl(hash, scratch);
4325}
4326
4327
4328void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
4329 Register hash,
4330 Register scratch) {
4331 // hash += hash << 3;
4332 __ leal(hash, Operand(hash, hash, times_8, 0));
4333 // hash ^= hash >> 11;
4334 __ movl(scratch, hash);
4335 __ sarl(scratch, Immediate(11));
4336 __ xorl(hash, scratch);
4337 // hash += hash << 15;
4338 __ movl(scratch, hash);
4339 __ shll(scratch, Immediate(15));
4340 __ addl(hash, scratch);
4341
4342 // if (hash == 0) hash = 27;
4343 Label hash_not_zero;
4344 __ j(not_zero, &hash_not_zero);
4345 __ movl(hash, Immediate(27));
4346 __ bind(&hash_not_zero);
4347}
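
// Taken together, GenerateHashInit, GenerateHashAddCharacter and
// GenerateHashGetHash compute the following hash (a C++ sketch of the same
// 32-bit arithmetic, matching the per-step comments above):
//
//   uint32_t hash = c0 + (c0 << 10);
//   hash ^= hash >> 6;
//   // for each further character c:
//   //   hash += c; hash += hash << 10; hash ^= hash >> 6;
//   hash += hash << 3;
//   hash ^= hash >> 11;
//   hash += hash << 15;
//   if (hash == 0) hash = 27;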
4348
4349void SubStringStub::Generate(MacroAssembler* masm) {
4350 Label runtime;
4351
4352 // Stack frame on entry.
4353 // rsp[0]: return address
4354 // rsp[8]: to
4355 // rsp[16]: from
4356 // rsp[24]: string
4357
4358 const int kToOffset = 1 * kPointerSize;
4359 const int kFromOffset = kToOffset + kPointerSize;
4360 const int kStringOffset = kFromOffset + kPointerSize;
4361 const int kArgumentsSize = (kStringOffset + kPointerSize) - kToOffset;
4362
4363 // Make sure first argument is a string.
4364 __ movq(rax, Operand(rsp, kStringOffset));
4365 STATIC_ASSERT(kSmiTag == 0);
4366 __ testl(rax, Immediate(kSmiTagMask));
4367 __ j(zero, &runtime);
4368 Condition is_string = masm->IsObjectStringType(rax, rbx, rbx);
4369 __ j(NegateCondition(is_string), &runtime);
4370
4371 // rax: string
4372 // rbx: instance type
4373 // Calculate length of sub string using the smi values.
4374 Label result_longer_than_two;
4375 __ movq(rcx, Operand(rsp, kToOffset));
4376 __ movq(rdx, Operand(rsp, kFromOffset));
4377 __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime);
4378
4379 __ SmiSub(rcx, rcx, rdx); // Overflow doesn't happen.
4380 __ cmpq(FieldOperand(rax, String::kLengthOffset), rcx);
4381 Label return_rax;
4382 __ j(equal, &return_rax);
4383 // Special handling of sub-strings of length 1 and 2. One character strings
4384 // are handled in the runtime system (looked up in the single character
4385 // cache). Two character strings are looked for in the symbol cache.
4386 __ SmiToInteger32(rcx, rcx);
4387 __ cmpl(rcx, Immediate(2));
4388 __ j(greater, &result_longer_than_two);
4389 __ j(less, &runtime);
4390
4391 // Sub string of length 2 requested.
4392 // rax: string
4393 // rbx: instance type
4394 // rcx: sub string length (value is 2)
4395 // rdx: from index (smi)
4396 __ JumpIfInstanceTypeIsNotSequentialAscii(rbx, rbx, &runtime);
4397
4398 // Get the two characters forming the sub string.
4399 __ SmiToInteger32(rdx, rdx); // From index is no longer smi.
4400 __ movzxbq(rbx, FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize));
4401 __ movzxbq(rcx,
4402 FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize + 1));
4403
4404 // Try to look up the two character string in the symbol table.
4405 Label make_two_character_string;
4406 StringHelper::GenerateTwoCharacterSymbolTableProbe(
4407 masm, rbx, rcx, rax, rdx, rdi, r14, &make_two_character_string);
4408 __ ret(3 * kPointerSize);
4409
4410 __ bind(&make_two_character_string);
4411 // Set up registers for allocating the two character string.
4412 __ movq(rax, Operand(rsp, kStringOffset));
4413 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
4414 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
4415 __ Set(rcx, 2);
4416
4417 __ bind(&result_longer_than_two);
4418
4419 // rax: string
4420 // rbx: instance type
4421 // rcx: result string length
4422 // Check for flat ascii string
4423 Label non_ascii_flat;
4424 __ JumpIfInstanceTypeIsNotSequentialAscii(rbx, rbx, &non_ascii_flat);
4425
4426 // Allocate the result.
4427 __ AllocateAsciiString(rax, rcx, rbx, rdx, rdi, &runtime);
4428
4429 // rax: result string
4430 // rcx: result string length
4431 __ movq(rdx, rsi); // rsi used by following code.
4432 // Locate first character of result.
4433 __ lea(rdi, FieldOperand(rax, SeqAsciiString::kHeaderSize));
4434 // Load string argument and locate character of sub string start.
4435 __ movq(rsi, Operand(rsp, kStringOffset));
4436 __ movq(rbx, Operand(rsp, kFromOffset));
4437 {
4438 SmiIndex smi_as_index = masm->SmiToIndex(rbx, rbx, times_1);
4439 __ lea(rsi, Operand(rsi, smi_as_index.reg, smi_as_index.scale,
4440 SeqAsciiString::kHeaderSize - kHeapObjectTag));
4441 }
4442
4443 // rax: result string
4444 // rcx: result length
4445 // rdx: original value of rsi
4446 // rdi: first character of result
4447 // rsi: character of sub string start
4448 StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, true);
4449 __ movq(rsi, rdx); // Restore rsi.
4450 __ IncrementCounter(&Counters::sub_string_native, 1);
4451 __ ret(kArgumentsSize);
4452
4453 __ bind(&non_ascii_flat);
4454 // rax: string
4455 // rbx: instance type & kStringRepresentationMask | kStringEncodingMask
4456 // rcx: result string length
4457 // Check for sequential two byte string
4458 __ cmpb(rbx, Immediate(kSeqStringTag | kTwoByteStringTag));
4459 __ j(not_equal, &runtime);
4460
4461 // Allocate the result.
4462 __ AllocateTwoByteString(rax, rcx, rbx, rdx, rdi, &runtime);
4463
4464 // rax: result string
4465 // rcx: result string length
4466 __ movq(rdx, rsi); // rsi used by following code.
4467 // Locate first character of result.
4468 __ lea(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize));
4469 // Load string argument and locate character of sub string start.
4470 __ movq(rsi, Operand(rsp, kStringOffset));
4471 __ movq(rbx, Operand(rsp, kFromOffset));
4472 {
4473 SmiIndex smi_as_index = masm->SmiToIndex(rbx, rbx, times_2);
4474 __ lea(rsi, Operand(rsi, smi_as_index.reg, smi_as_index.scale,
4475 SeqAsciiString::kHeaderSize - kHeapObjectTag));
4476 }
4477
4478 // rax: result string
4479 // rcx: result length
4480 // rdx: original value of rsi
4481 // rdi: first character of result
4482 // rsi: character of sub string start
4483 StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, false);
4484 __ movq(rsi, rdx); // Restore rsi.
4485
4486 __ bind(&return_rax);
4487 __ IncrementCounter(&Counters::sub_string_native, 1);
4488 __ ret(kArgumentsSize);
4489
4490 // Just jump to runtime to create the sub string.
4491 __ bind(&runtime);
4492 __ TailCallRuntime(Runtime::kSubString, 3, 1);
4493}


void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
                                                        Register left,
                                                        Register right,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3,
                                                        Register scratch4) {
  // Ensure that you can always subtract a string length from a
  // non-negative number (e.g. another length).
  STATIC_ASSERT(String::kMaxLength < 0x7fffffff);

  // Find minimum length and length difference.
  __ movq(scratch1, FieldOperand(left, String::kLengthOffset));
  __ movq(scratch4, scratch1);
  __ SmiSub(scratch4,
            scratch4,
            FieldOperand(right, String::kLengthOffset));
  // Register scratch4 now holds left.length - right.length.
  const Register length_difference = scratch4;
  NearLabel left_shorter;
  __ j(less, &left_shorter);
  // The right string isn't longer than the left one.
  // Get the right string's length by subtracting the (non-negative)
  // difference from the left string's length.
  __ SmiSub(scratch1, scratch1, length_difference);
  __ bind(&left_shorter);
  // Register scratch1 now holds Min(left.length, right.length).
  const Register min_length = scratch1;

  NearLabel compare_lengths;
  // If min_length is zero, go directly to comparing lengths.
  __ SmiTest(min_length);
  __ j(zero, &compare_lengths);

  __ SmiToInteger32(min_length, min_length);

  // Registers scratch2 and scratch3 are free.
  NearLabel result_not_equal;
  Label loop;
  {
    // Check characters 0 .. min_length - 1 in a loop.
    // Use scratch3 as loop index, min_length as limit and scratch2
    // for computation.
    const Register index = scratch3;
    __ movl(index, Immediate(0));  // Index into strings.
    __ bind(&loop);
    // Compare characters.
    // TODO(lrn): Could we load more than one character at a time?
    __ movb(scratch2, FieldOperand(left,
                                   index,
                                   times_1,
                                   SeqAsciiString::kHeaderSize));
    // Increment index and use a -1 displacement on the next load to give
    // the previous load extra time to complete.
    __ addl(index, Immediate(1));
    __ cmpb(scratch2, FieldOperand(right,
                                   index,
                                   times_1,
                                   SeqAsciiString::kHeaderSize - 1));
    __ j(not_equal, &result_not_equal);
    __ cmpl(index, min_length);
    __ j(not_equal, &loop);
  }
  // Completed loop without finding different characters.
  // Compare lengths (precomputed).
  __ bind(&compare_lengths);
  __ SmiTest(length_difference);
  __ j(not_zero, &result_not_equal);

  // Result is EQUAL.
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  NearLabel result_greater;
  __ bind(&result_not_equal);
  // Unequal comparison of left to right, either character or length.
  __ j(greater, &result_greater);

  // Result is LESS.
  __ Move(rax, Smi::FromInt(LESS));
  __ ret(0);

  // Result is GREATER.
  __ bind(&result_greater);
  __ Move(rax, Smi::FromInt(GREATER));
  __ ret(0);
}
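
// For example, the code above returns Smi::FromInt(LESS) for ("abc", "abd")
// and also for ("ab", "abc") (equal prefix, left shorter, so the length
// comparison decides), EQUAL for ("abc", "abc"), and GREATER for
// ("abd", "abc").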


void StringCompareStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  //  rsp[0]: return address
  //  rsp[8]: right string
  //  rsp[16]: left string

  __ movq(rdx, Operand(rsp, 2 * kPointerSize));  // left
  __ movq(rax, Operand(rsp, 1 * kPointerSize));  // right

  // Check for identity.
  NearLabel not_same;
  __ cmpq(rdx, rax);
  __ j(not_equal, &not_same);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ IncrementCounter(&Counters::string_compare_native, 1);
  __ ret(2 * kPointerSize);

  __ bind(&not_same);

  // Check that both are sequential ASCII strings.
  __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime);

  // Inline comparison of ASCII strings.
  __ IncrementCounter(&Counters::string_compare_native, 1);
  // Drop the arguments from the stack, preserving the return address.
  __ pop(rcx);
  __ addq(rsp, Immediate(2 * kPointerSize));
  __ push(rcx);
  GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8);
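  // GenerateCompareFlatAsciiStrings always returns to the caller with ret,
  // so execution only reaches the runtime label below via the bail-out
  // jumps above.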

  // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
}


void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::SMIS);
  NearLabel miss;
  __ JumpIfNotBothSmi(rdx, rax, &miss);

  if (GetCondition() == equal) {
    // For equality we do not care about the sign of the result.
    __ subq(rax, rdx);
  } else {
    NearLabel done;
    __ subq(rdx, rax);
    __ j(no_overflow, &done);
    // Correct sign of result in case of overflow.
    __ SmiNot(rdx, rdx);
    __ bind(&done);
    __ movq(rax, rdx);
  }
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}
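
// A concrete case of the overflow fix above: for untagged values
// left = kMaxInt and right = -1, the difference 2^31 does not fit in
// 32 bits, so the 64-bit subtraction of the tagged smis overflows and rdx
// ends up with the wrong sign. SmiNot replaces rdx with its bitwise
// complement (still a valid smi), flipping the sign bit; since callers only
// compare the returned smi against zero, left > right is still reported
// correctly.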


void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::HEAP_NUMBERS);

  NearLabel generic_stub;
  NearLabel unordered;
  NearLabel miss;
  Condition either_smi = masm->CheckEitherSmi(rax, rdx);
  __ j(either_smi, &generic_stub);

  __ CmpObjectType(rax, HEAP_NUMBER_TYPE, rcx);
  __ j(not_equal, &miss);
  __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
  __ j(not_equal, &miss);

  // Load left and right operands.
  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));

  // Compare operands.
  __ ucomisd(xmm0, xmm1);

  // Don't base result on EFLAGS when a NaN is involved.
  __ j(parity_even, &unordered);

  // Return a result of -1, 0, or 1, based on EFLAGS.
  // Clear the registers with mov rather than xor: xor would clobber the
  // flags we still need.
  __ movl(rax, Immediate(0));
  __ movl(rcx, Immediate(0));
  __ setcc(above, rax);  // rax = 1 if xmm0 > xmm1 (CF and ZF both clear).
  __ sbbq(rax, rcx);     // Subtract the carry; CF is set if xmm0 < xmm1.
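  // Net effect: rax = (xmm0 > xmm1) - (xmm0 < xmm1), a branchless -1/0/+1.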
  __ ret(0);

  __ bind(&unordered);

  CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
  __ bind(&generic_stub);
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::OBJECTS);
  NearLabel miss;
  Condition either_smi = masm->CheckEitherSmi(rdx, rax);
  __ j(either_smi, &miss);

  __ CmpObjectType(rax, JS_OBJECT_TYPE, rcx);
  __ j(not_equal, &miss, not_taken);
  __ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx);
  __ j(not_equal, &miss, not_taken);

  ASSERT(GetCondition() == equal);
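  // JS object equality here is reference equality: identical pointers
  // subtract to zero (the EQUAL result); any other pair yields a non-zero
  // value that the caller treats as not-equal.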
  __ subq(rax, rdx);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
  // Save the registers.
  __ pop(rcx);
  __ push(rdx);
  __ push(rax);
  __ push(rcx);

  // Call the runtime system in a fresh internal frame.
  ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss));
  __ EnterInternalFrame();
  __ push(rdx);
  __ push(rax);
  __ Push(Smi::FromInt(op_));
  __ CallExternalReference(miss, 3);
  __ LeaveInternalFrame();

  // Compute the entry point of the rewritten stub.
  __ lea(rdi, FieldOperand(rax, Code::kHeaderSize));

  // Restore registers.
  __ pop(rcx);
  __ pop(rax);
  __ pop(rdx);
  __ push(rcx);

  // Do a tail call to the rewritten stub.
  __ jmp(rdi);
}
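
// Protocol note: the miss handler returns in rax the code object of a stub
// newly specialized for the operand types it just observed; the sequence
// above re-materializes the original rdx/rax operands and tail-calls
// straight into that stub's first instruction.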


void GenerateFastPixelArrayLoad(MacroAssembler* masm,
                                Register receiver,
                                Register key,
                                Register elements,
                                Register untagged_key,
                                Register result,
                                Label* not_pixel_array,
                                Label* key_not_smi,
                                Label* out_of_range) {
  // Register use:
  //   receiver - holds the receiver and is unchanged.
  //   key - holds the key and is unchanged (must be a smi).
  //   elements - is set to the receiver's elements if the receiver doesn't
  //              have a pixel array or the key is not a smi, otherwise it's
  //              the elements' external pointer.
  //   untagged_key - is set to the untagged key.

  // Some callers have already verified that the key is a smi; they pass
  // NULL as key_not_smi to signal that case. Otherwise an explicit smi
  // check on the key is emitted here.
  if (key_not_smi != NULL) {
    __ JumpIfNotSmi(key, key_not_smi);
  } else {
    if (FLAG_debug_code) {
      __ AbortIfNotSmi(key);
    }
  }
  __ SmiToInteger32(untagged_key, key);

  // Verify that the receiver has pixel array elements.
  __ movq(elements, FieldOperand(receiver, JSObject::kElementsOffset));
  __ CheckMap(elements, Factory::pixel_array_map(), not_pixel_array, true);

  // Check that the smi is in range.
  __ cmpl(untagged_key, FieldOperand(elements, PixelArray::kLengthOffset));
  __ j(above_equal, out_of_range);  // Unsigned check handles negative keys.

  // Load and tag the element as a smi.
  __ movq(elements, FieldOperand(elements, PixelArray::kExternalPointerOffset));
  __ movzxbq(result, Operand(elements, untagged_key, times_1, 0));
  __ Integer32ToSmi(result, result);
  __ ret(0);
}
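
// For illustration only: this helper backs the fast path for keyed loads
// from pixel arrays (e.g. canvas pixel data accessed as pixels[i] in JS).
// Each element is a single byte in 0..255, so the zero-extended result
// always fits in a smi and tagging it never allocates.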


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64