// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "bootstrapper.h"
#include "code-stubs.h"
#include "regexp-macro-assembler.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

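// Note on the fast paths below: a smi is already a valid number, and a heap
// number is likewise returned unchanged, so the stub only falls back to the
// TO_NUMBER builtin for the remaining (non-smi, non-heap-number) inputs.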
void ToNumberStub::Generate(MacroAssembler* masm) {
  // The ToNumber stub takes one argument in rax.
  NearLabel check_heap_number, call_builtin;
  __ SmiTest(rax);
  __ j(not_zero, &check_heap_number);
  __ Ret();

  __ bind(&check_heap_number);
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &call_builtin);
  __ Ret();

  __ bind(&call_builtin);
  __ pop(rcx);   // Pop return address.
  __ push(rax);
  __ push(rcx);  // Push return address.
  __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION);
}


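// Stack layout on entry (derived from the load and the ret below):
//   rsp[0]: return address.
//   rsp[8]: shared function info.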
void FastNewClosureStub::Generate(MacroAssembler* masm) {
  // Create a new closure from the given function info in new
  // space. Set the context to the current context in rsi.
  Label gc;
  __ AllocateInNewSpace(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT);

  // Get the function info from the stack.
  __ movq(rdx, Operand(rsp, 1 * kPointerSize));

  int map_index = strict_mode_ == kStrictMode
      ? Context::STRICT_MODE_FUNCTION_MAP_INDEX
      : Context::FUNCTION_MAP_INDEX;

  // Compute the function map in the current global context and set that
  // as the map of the allocated object.
  __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset));
  __ movq(rcx, Operand(rcx, Context::SlotOffset(map_index)));
  __ movq(FieldOperand(rax, JSObject::kMapOffset), rcx);

  // Initialize the rest of the function. We don't have to update the
  // write barrier because the allocated object is in new space.
  __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
  __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex);
  __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
  __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
  __ movq(FieldOperand(rax, JSObject::kElementsOffset), rbx);
  __ movq(FieldOperand(rax, JSFunction::kPrototypeOrInitialMapOffset), rcx);
  __ movq(FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset), rdx);
  __ movq(FieldOperand(rax, JSFunction::kContextOffset), rsi);
  __ movq(FieldOperand(rax, JSFunction::kLiteralsOffset), rbx);
  __ movq(FieldOperand(rax, JSFunction::kNextFunctionLinkOffset), rdi);

  // Initialize the code pointer in the function to be the one
  // found in the shared function info object.
  __ movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
  __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
  __ movq(FieldOperand(rax, JSFunction::kCodeEntryOffset), rdx);

  // Return and remove the on-stack parameter.
  __ ret(1 * kPointerSize);

  // Create a new closure through the slower runtime call.
  __ bind(&gc);
  __ pop(rcx);  // Temporarily remove return address.
  __ pop(rdx);
  __ push(rsi);
  __ push(rdx);
  __ PushRoot(Heap::kFalseValueRootIndex);
  __ push(rcx);  // Restore return address.
  __ TailCallRuntime(Runtime::kNewClosure, 3, 1);
}


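// Stack layout on entry (derived from the load and the ret below):
//   rsp[0]: return address.
//   rsp[8]: the function whose context is being allocated.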
void FastNewContextStub::Generate(MacroAssembler* masm) {
  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
  __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize,
                        rax, rbx, rcx, &gc, TAG_OBJECT);

  // Get the function from the stack.
  __ movq(rcx, Operand(rsp, 1 * kPointerSize));

  // Set up the object header.
  __ LoadRoot(kScratchRegister, Heap::kContextMapRootIndex);
  __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
  __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));

  // Set up the fixed slots.
  __ Set(rbx, 0);  // Set to NULL.
  __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx);
  __ movq(Operand(rax, Context::SlotOffset(Context::FCONTEXT_INDEX)), rax);
  __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rbx);
  __ movq(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx);

  // Copy the global object from the surrounding context.
  __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_INDEX)), rbx);

  // Initialize the rest of the slots to undefined.
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
    __ movq(Operand(rax, Context::SlotOffset(i)), rbx);
  }

  // Return and remove the on-stack parameter.
  __ movq(rsi, rax);
  __ ret(1 * kPointerSize);

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kNewContext, 1, 1);
}


void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [rsp + kPointerSize]: constant elements.
  // [rsp + (2 * kPointerSize)]: literal index.
  // [rsp + (3 * kPointerSize)]: literals array.

  // All sizes here are multiples of kPointerSize.
  int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
  int size = JSArray::kSize + elements_size;

  // Load boilerplate object into rcx and check if we need to create a
  // boilerplate.
  Label slow_case;
  __ movq(rcx, Operand(rsp, 3 * kPointerSize));
  __ movq(rax, Operand(rsp, 2 * kPointerSize));
  SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
  __ movq(rcx,
          FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize));
  __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
  __ j(equal, &slow_case);

  if (FLAG_debug_code) {
    const char* message;
    Heap::RootListIndex expected_map_index;
    if (mode_ == CLONE_ELEMENTS) {
      message = "Expected (writable) fixed array";
      expected_map_index = Heap::kFixedArrayMapRootIndex;
    } else {
      ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS);
      message = "Expected copy-on-write fixed array";
      expected_map_index = Heap::kFixedCOWArrayMapRootIndex;
    }
    __ push(rcx);
    __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
    __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
                   expected_map_index);
    __ Assert(equal, message);
    __ pop(rcx);
  }

  // Allocate both the JS array and the elements array in one big
  // allocation. This avoids multiple limit checks.
  __ AllocateInNewSpace(size, rax, rbx, rdx, &slow_case, TAG_OBJECT);

  // Copy the JS array part.
  for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
    if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
      __ movq(rbx, FieldOperand(rcx, i));
      __ movq(FieldOperand(rax, i), rbx);
    }
  }

  if (length_ > 0) {
    // Get hold of the elements array of the boilerplate and set up the
    // elements pointer in the resulting object.
    __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
    __ lea(rdx, Operand(rax, JSArray::kSize));
    __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx);

    // Copy the elements array.
    for (int i = 0; i < elements_size; i += kPointerSize) {
      __ movq(rbx, FieldOperand(rcx, i));
      __ movq(FieldOperand(rdx, i), rbx);
    }
  }

  // Return and remove the on-stack parameters.
  __ ret(3 * kPointerSize);

  __ bind(&slow_case);
  __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
}


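// Truth table implemented below (the argument is taken from the stack and
// the result, 1 or 0, is returned in rax):
//   null and undetectable objects -> false
//   JS objects                    -> true
//   strings                       -> false iff empty
//   heap numbers                  -> false iff +0, -0, or NaN
//   everything else               -> true
// Note that the stub dereferences the argument's map without a smi check,
// so smi inputs are presumably filtered out by the caller.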
void ToBooleanStub::Generate(MacroAssembler* masm) {
  NearLabel false_result, true_result, not_string;
  __ movq(rax, Operand(rsp, 1 * kPointerSize));

  // 'null' => false.
  __ CompareRoot(rax, Heap::kNullValueRootIndex);
  __ j(equal, &false_result);

  // Get the map and type of the heap object.
  // We don't use CmpObjectType because we manipulate the type field.
  __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
  __ movzxbq(rcx, FieldOperand(rdx, Map::kInstanceTypeOffset));

  // Undetectable => false.
  __ movzxbq(rbx, FieldOperand(rdx, Map::kBitFieldOffset));
  __ and_(rbx, Immediate(1 << Map::kIsUndetectable));
  __ j(not_zero, &false_result);

  // JavaScript object => true.
  __ cmpq(rcx, Immediate(FIRST_JS_OBJECT_TYPE));
  __ j(above_equal, &true_result);

  // String value => false iff empty.
  __ cmpq(rcx, Immediate(FIRST_NONSTRING_TYPE));
  __ j(above_equal, &not_string);
  __ movq(rdx, FieldOperand(rax, String::kLengthOffset));
  __ SmiTest(rdx);
  __ j(zero, &false_result);
  __ jmp(&true_result);

  __ bind(&not_string);
  __ CompareRoot(rdx, Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &true_result);
  // HeapNumber => false iff +0, -0, or NaN.
  // These three cases set the zero flag when compared to zero using ucomisd.
  __ xorpd(xmm0, xmm0);
  __ ucomisd(xmm0, FieldOperand(rax, HeapNumber::kValueOffset));
  __ j(zero, &false_result);
  // Fall through to |true_result|.

  // Return 1/0 for true/false in rax.
  __ bind(&true_result);
  __ movq(rax, Immediate(1));
  __ ret(1 * kPointerSize);
  __ bind(&false_result);
  __ Set(rax, 0);
  __ ret(1 * kPointerSize);
}


const char* GenericBinaryOpStub::GetName() {
  if (name_ != NULL) return name_;
  const int kMaxNameLength = 100;
  name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
      kMaxNameLength);
  if (name_ == NULL) return "OOM";
  const char* op_name = Token::Name(op_);
  const char* overwrite_name;
  switch (mode_) {
    case NO_OVERWRITE: overwrite_name = "Alloc"; break;
    case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
    case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
    default: overwrite_name = "UnknownOverwrite"; break;
  }

  OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
               "GenericBinaryOpStub_%s_%s%s_%s%s_%s_%s",
               op_name,
               overwrite_name,
               (flags_ & NO_SMI_CODE_IN_STUB) ? "_NoSmiInStub" : "",
               args_in_registers_ ? "RegArgs" : "StackArgs",
               args_reversed_ ? "_R" : "",
               static_operands_type_.ToString(),
               BinaryOpIC::GetName(runtime_operands_type_));
  return name_;
}


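// The three GenerateCall overloads below shuffle the operands into the
// register calling convention (left in rdx, right in rax) before calling
// the stub. If the operands arrive exactly swapped, a commutative
// operation can simply record SetArgsReversed() instead of emitting an
// xchg; partial overlaps dictate the order of the moves so that neither
// operand is clobbered before it has been copied.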
void GenericBinaryOpStub::GenerateCall(
    MacroAssembler* masm,
    Register left,
    Register right) {
  if (!ArgsInRegistersSupported()) {
    // Pass arguments on the stack.
    __ push(left);
    __ push(right);
  } else {
    // The calling convention with registers is left in rdx and right in rax.
    Register left_arg = rdx;
    Register right_arg = rax;
    if (!(left.is(left_arg) && right.is(right_arg))) {
      if (left.is(right_arg) && right.is(left_arg)) {
        if (IsOperationCommutative()) {
          SetArgsReversed();
        } else {
          __ xchg(left, right);
        }
      } else if (left.is(left_arg)) {
        __ movq(right_arg, right);
      } else if (right.is(right_arg)) {
        __ movq(left_arg, left);
      } else if (left.is(right_arg)) {
        if (IsOperationCommutative()) {
          __ movq(left_arg, right);
          SetArgsReversed();
        } else {
          // Order of moves is important to avoid destroying left argument.
          __ movq(left_arg, left);
          __ movq(right_arg, right);
        }
      } else if (right.is(left_arg)) {
        if (IsOperationCommutative()) {
          __ movq(right_arg, left);
          SetArgsReversed();
        } else {
          // Order of moves is important to avoid destroying right argument.
          __ movq(right_arg, right);
          __ movq(left_arg, left);
        }
      } else {
        // Order of moves is not important.
        __ movq(left_arg, left);
        __ movq(right_arg, right);
      }
    }

    // Update flags to indicate that arguments are in registers.
    SetArgsInRegisters();
    Counters* counters = masm->isolate()->counters();
    __ IncrementCounter(counters->generic_binary_stub_calls_regs(), 1);
  }

  // Call the stub.
  __ CallStub(this);
}


void GenericBinaryOpStub::GenerateCall(
    MacroAssembler* masm,
    Register left,
    Smi* right) {
  if (!ArgsInRegistersSupported()) {
    // Pass arguments on the stack.
    __ push(left);
    __ Push(right);
  } else {
    // The calling convention with registers is left in rdx and right in rax.
    Register left_arg = rdx;
    Register right_arg = rax;
    if (left.is(left_arg)) {
      __ Move(right_arg, right);
    } else if (left.is(right_arg) && IsOperationCommutative()) {
      __ Move(left_arg, right);
      SetArgsReversed();
    } else {
      // For non-commutative operations, left and right_arg might be
      // the same register. Therefore, the order of the moves is
      // important here in order to not overwrite left before moving
      // it to left_arg.
      __ movq(left_arg, left);
      __ Move(right_arg, right);
    }

    // Update flags to indicate that arguments are in registers.
    SetArgsInRegisters();
    Counters* counters = masm->isolate()->counters();
    __ IncrementCounter(counters->generic_binary_stub_calls_regs(), 1);
  }

  // Call the stub.
  __ CallStub(this);
}


void GenericBinaryOpStub::GenerateCall(
    MacroAssembler* masm,
    Smi* left,
    Register right) {
  if (!ArgsInRegistersSupported()) {
    // Pass arguments on the stack.
    __ Push(left);
    __ push(right);
  } else {
    // The calling convention with registers is left in rdx and right in rax.
    Register left_arg = rdx;
    Register right_arg = rax;
    if (right.is(right_arg)) {
      __ Move(left_arg, left);
    } else if (right.is(left_arg) && IsOperationCommutative()) {
      __ Move(right_arg, left);
      SetArgsReversed();
    } else {
      // For non-commutative operations, right and left_arg might be
      // the same register. Therefore, the order of the moves is
      // important here in order to not overwrite right before moving
      // it to right_arg.
      __ movq(right_arg, right);
      __ Move(left_arg, left);
    }
    // Update flags to indicate that arguments are in registers.
    SetArgsInRegisters();
    Counters* counters = masm->isolate()->counters();
    __ IncrementCounter(counters->generic_binary_stub_calls_regs(), 1);
  }

  // Call the stub.
  __ CallStub(this);
}


class FloatingPointHelper : public AllStatic {
 public:
  // Load the operands from rdx and rax into xmm0 and xmm1, as doubles.
  // If the operands are not both numbers, jump to not_numbers.
  // Leaves rdx and rax unchanged. SmiOperands assumes both are smis.
  // NumberOperands assumes both are smis or heap numbers.
  static void LoadSSE2SmiOperands(MacroAssembler* masm);
  static void LoadSSE2NumberOperands(MacroAssembler* masm);
  static void LoadSSE2UnknownOperands(MacroAssembler* masm,
                                      Label* not_numbers);

  // Takes the operands in rdx and rax and loads them as integers in rax
  // and rcx.
  static void LoadAsIntegers(MacroAssembler* masm,
                             Label* operand_conversion_failure,
                             Register heap_number_map);
  // As above, but we know the operands to be numbers. In that case,
  // conversion can't fail.
  static void LoadNumbersAsIntegers(MacroAssembler* masm);
};


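// Smi fast path for the binary operations. On x64 a smi carries a full
// 32-bit payload (see the STATIC_ASSERT(kSmiValueSize == 32) further down
// in this file), so smi arithmetic can only leave this code through the
// overflow and special-case paths (use_fp_on_smis, not_smis, or slow).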
void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) {
  // 1. Move arguments into rdx, rax except for DIV and MOD, which need the
  // dividend in rax and rdx free for the division. Use rax, rbx for those.
  Comment load_comment(masm, "-- Load arguments");
  Register left = rdx;
  Register right = rax;
  if (op_ == Token::DIV || op_ == Token::MOD) {
    left = rax;
    right = rbx;
    if (HasArgsInRegisters()) {
      __ movq(rbx, rax);
      __ movq(rax, rdx);
    }
  }
  if (!HasArgsInRegisters()) {
    __ movq(right, Operand(rsp, 1 * kPointerSize));
    __ movq(left, Operand(rsp, 2 * kPointerSize));
  }

  Label not_smis;
  // 2. Smi check both operands.
  if (static_operands_type_.IsSmi()) {
    // Skip smi check if we know that both arguments are smis.
    if (FLAG_debug_code) {
      __ AbortIfNotSmi(left);
      __ AbortIfNotSmi(right);
    }
    if (op_ == Token::BIT_OR) {
      // Handle OR here, since we do extra smi-checking in the or code below.
      __ SmiOr(right, right, left);
      GenerateReturn(masm);
      return;
    }
  } else {
    if (op_ != Token::BIT_OR) {
      // Skip the check for OR as it is better combined with the
      // actual operation.
      Comment smi_check_comment(masm, "-- Smi check arguments");
      __ JumpIfNotBothSmi(left, right, &not_smis);
    }
  }

  // 3. Operands are both smis (except for OR), perform the operation leaving
  // the result in rax and check the result if necessary.
  Comment perform_smi(masm, "-- Perform smi operation");
  Label use_fp_on_smis;
  switch (op_) {
    case Token::ADD: {
      ASSERT(right.is(rax));
      __ SmiAdd(right, right, left, &use_fp_on_smis);  // ADD is commutative.
      break;
    }

    case Token::SUB: {
      __ SmiSub(left, left, right, &use_fp_on_smis);
      __ movq(rax, left);
      break;
    }

    case Token::MUL:
      ASSERT(right.is(rax));
      __ SmiMul(right, right, left, &use_fp_on_smis);  // MUL is commutative.
      break;

    case Token::DIV:
      ASSERT(left.is(rax));
      __ SmiDiv(left, left, right, &use_fp_on_smis);
      break;

    case Token::MOD:
      ASSERT(left.is(rax));
      __ SmiMod(left, left, right, slow);
      break;

    case Token::BIT_OR:
      ASSERT(right.is(rax));
      __ movq(rcx, right);  // Save the right operand.
      __ SmiOr(right, right, left);  // BIT_OR is commutative.
      __ testb(right, Immediate(kSmiTagMask));
      __ j(not_zero, &not_smis);
      break;

    case Token::BIT_AND:
      ASSERT(right.is(rax));
      __ SmiAnd(right, right, left);  // BIT_AND is commutative.
      break;

    case Token::BIT_XOR:
      ASSERT(right.is(rax));
      __ SmiXor(right, right, left);  // BIT_XOR is commutative.
      break;

    case Token::SHL:
    case Token::SHR:
    case Token::SAR:
      switch (op_) {
        case Token::SAR:
          __ SmiShiftArithmeticRight(left, left, right);
          break;
        case Token::SHR:
          __ SmiShiftLogicalRight(left, left, right, slow);
          break;
        case Token::SHL:
          __ SmiShiftLeft(left, left, right);
          break;
        default:
          UNREACHABLE();
      }
      __ movq(rax, left);
      break;

    default:
      UNREACHABLE();
      break;
  }

  // 4. Emit return of result in rax.
  GenerateReturn(masm);

  // 5. For some operations emit inline code to perform floating point
  // operations on known smis (e.g., if the result of the operation
  // overflowed the smi range).
  switch (op_) {
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV: {
      ASSERT(use_fp_on_smis.is_linked());
      __ bind(&use_fp_on_smis);
      if (op_ == Token::DIV) {
        __ movq(rdx, rax);
        __ movq(rax, rbx);
      }
      // left is rdx, right is rax.
      __ AllocateHeapNumber(rbx, rcx, slow);
      FloatingPointHelper::LoadSSE2SmiOperands(masm);
      switch (op_) {
        case Token::ADD: __ addsd(xmm0, xmm1); break;
        case Token::SUB: __ subsd(xmm0, xmm1); break;
        case Token::MUL: __ mulsd(xmm0, xmm1); break;
        case Token::DIV: __ divsd(xmm0, xmm1); break;
        default: UNREACHABLE();
      }
      __ movsd(FieldOperand(rbx, HeapNumber::kValueOffset), xmm0);
      __ movq(rax, rbx);
      GenerateReturn(masm);
    }
    default:
      break;
  }

  // 6. Non-smi operands, fall out to the non-smi code with the operands in
  // rdx and rax.
  Comment done_comment(masm, "-- Enter non-smi code");
  __ bind(&not_smis);

  switch (op_) {
    case Token::DIV:
    case Token::MOD:
      // Operands are in rax, rbx at this point.
      __ movq(rdx, rax);
      __ movq(rax, rbx);
      break;

    case Token::BIT_OR:
      // Right operand is saved in rcx and rax was destroyed by the smi
      // operation.
      __ movq(rax, rcx);
      break;

    default:
      break;
  }
}


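// Overall dispatch for the generic stub: try the smi fast path first (when
// compiled in), then the floating-point or int32 path, and finally fall
// back to the runtime system and builtins under &call_runtime. ADD
// additionally tests for string operands so that string concatenation can
// be tail-called directly.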
void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
  Label call_runtime;

  if (ShouldGenerateSmiCode()) {
    GenerateSmiCode(masm, &call_runtime);
  } else if (op_ != Token::MOD) {
    if (!HasArgsInRegisters()) {
      GenerateLoadArguments(masm);
    }
  }
  // Floating point case.
  if (ShouldGenerateFPCode()) {
    switch (op_) {
      case Token::ADD:
      case Token::SUB:
      case Token::MUL:
      case Token::DIV: {
        if (runtime_operands_type_ == BinaryOpIC::DEFAULT &&
            HasSmiCodeInStub()) {
          // Execution reaches this point when the first non-smi argument
          // occurs (and only if smi code is generated). This is the right
          // moment to patch to HEAP_NUMBERS state. The transition is
          // attempted only for the four basic operations. The stub stays
          // in the DEFAULT state forever for all other operations (also if
          // smi code is skipped).
          GenerateTypeTransition(masm);
          break;
        }

        Label not_floats;
        // rax: y
        // rdx: x
        if (static_operands_type_.IsNumber()) {
          if (FLAG_debug_code) {
            // Assert at runtime that inputs are only numbers.
            __ AbortIfNotNumber(rdx);
            __ AbortIfNotNumber(rax);
          }
          FloatingPointHelper::LoadSSE2NumberOperands(masm);
        } else {
          FloatingPointHelper::LoadSSE2UnknownOperands(masm, &call_runtime);
        }

        switch (op_) {
          case Token::ADD: __ addsd(xmm0, xmm1); break;
          case Token::SUB: __ subsd(xmm0, xmm1); break;
          case Token::MUL: __ mulsd(xmm0, xmm1); break;
          case Token::DIV: __ divsd(xmm0, xmm1); break;
          default: UNREACHABLE();
        }
        // Allocate a heap number, if needed.
        Label skip_allocation;
        OverwriteMode mode = mode_;
        if (HasArgsReversed()) {
          if (mode == OVERWRITE_RIGHT) {
            mode = OVERWRITE_LEFT;
          } else if (mode == OVERWRITE_LEFT) {
            mode = OVERWRITE_RIGHT;
          }
        }
        switch (mode) {
          case OVERWRITE_LEFT:
            __ JumpIfNotSmi(rdx, &skip_allocation);
            __ AllocateHeapNumber(rbx, rcx, &call_runtime);
            __ movq(rdx, rbx);
            __ bind(&skip_allocation);
            __ movq(rax, rdx);
            break;
          case OVERWRITE_RIGHT:
            // If the argument in rax is already an object, we skip the
            // allocation of a heap number.
            __ JumpIfNotSmi(rax, &skip_allocation);
            // Fall through!
          case NO_OVERWRITE:
            // Allocate a heap number for the result. Keep rax and rdx intact
            // for the possible runtime call.
            __ AllocateHeapNumber(rbx, rcx, &call_runtime);
            __ movq(rax, rbx);
            __ bind(&skip_allocation);
            break;
          default: UNREACHABLE();
        }
        __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0);
        GenerateReturn(masm);
        __ bind(&not_floats);
        if (runtime_operands_type_ == BinaryOpIC::DEFAULT &&
            !HasSmiCodeInStub()) {
          // Execution reaches this point when the first non-number argument
          // occurs (and only if smi code is skipped from the stub, otherwise
          // the patching has already been done earlier in this case branch).
          // A perfect moment to try patching to STRINGS for ADD operation.
          if (op_ == Token::ADD) {
            GenerateTypeTransition(masm);
          }
        }
        break;
      }
      case Token::MOD: {
        // For MOD we go directly to runtime in the non-smi case.
        break;
      }
      case Token::BIT_OR:
      case Token::BIT_AND:
      case Token::BIT_XOR:
      case Token::SAR:
      case Token::SHL:
      case Token::SHR: {
        Label skip_allocation, non_smi_shr_result;
        Register heap_number_map = r9;
        __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
        if (static_operands_type_.IsNumber()) {
          if (FLAG_debug_code) {
            // Assert at runtime that inputs are only numbers.
            __ AbortIfNotNumber(rdx);
            __ AbortIfNotNumber(rax);
          }
          FloatingPointHelper::LoadNumbersAsIntegers(masm);
        } else {
          FloatingPointHelper::LoadAsIntegers(masm,
                                              &call_runtime,
                                              heap_number_map);
        }
        switch (op_) {
          case Token::BIT_OR:  __ orl(rax, rcx); break;
          case Token::BIT_AND: __ andl(rax, rcx); break;
          case Token::BIT_XOR: __ xorl(rax, rcx); break;
          case Token::SAR: __ sarl_cl(rax); break;
          case Token::SHL: __ shll_cl(rax); break;
          case Token::SHR: {
            __ shrl_cl(rax);
            // Check if result is negative. This can only happen for a shift
            // by zero.
            __ testl(rax, rax);
            __ j(negative, &non_smi_shr_result);
            break;
          }
          default: UNREACHABLE();
        }

        STATIC_ASSERT(kSmiValueSize == 32);
        // Tag smi result and return.
        __ Integer32ToSmi(rax, rax);
        GenerateReturn(masm);

        // All bit-ops except SHR return a signed int32 that can be
        // returned immediately as a smi.
        // We might need to allocate a HeapNumber if we shift a negative
        // number right by zero (i.e., convert to UInt32).
        if (op_ == Token::SHR) {
          ASSERT(non_smi_shr_result.is_linked());
          __ bind(&non_smi_shr_result);
          // Allocate a heap number if needed.
          __ movl(rbx, rax);  // rbx holds result value (uint32 value as int64).
          switch (mode_) {
            case OVERWRITE_LEFT:
            case OVERWRITE_RIGHT:
              // If the operand was an object, we skip the
              // allocation of a heap number.
              __ movq(rax, Operand(rsp, mode_ == OVERWRITE_RIGHT ?
                                   1 * kPointerSize : 2 * kPointerSize));
              __ JumpIfNotSmi(rax, &skip_allocation);
              // Fall through!
            case NO_OVERWRITE:
              // Allocate heap number in new space.
              // Not using AllocateHeapNumber macro in order to reuse
              // already loaded heap_number_map.
              __ AllocateInNewSpace(HeapNumber::kSize,
                                    rax,
                                    rcx,
                                    no_reg,
                                    &call_runtime,
                                    TAG_OBJECT);
              // Set the map.
              if (FLAG_debug_code) {
                __ AbortIfNotRootValue(heap_number_map,
                                       Heap::kHeapNumberMapRootIndex,
                                       "HeapNumberMap register clobbered.");
              }
              __ movq(FieldOperand(rax, HeapObject::kMapOffset),
                      heap_number_map);
              __ bind(&skip_allocation);
              break;
            default: UNREACHABLE();
          }
          // Store the result in the HeapNumber and return.
          __ cvtqsi2sd(xmm0, rbx);
          __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0);
          GenerateReturn(masm);
        }

        break;
      }
      default: UNREACHABLE(); break;
    }
  }

  // If all else fails, use the runtime system to get the correct
  // result. If arguments were passed in registers, place them on the
  // stack in the correct order below the return address.
  __ bind(&call_runtime);

  if (HasArgsInRegisters()) {
    GenerateRegisterArgsPush(masm);
  }

  switch (op_) {
    case Token::ADD: {
      // Registers containing left and right operands respectively.
      Register lhs, rhs;

      if (HasArgsReversed()) {
        lhs = rax;
        rhs = rdx;
      } else {
        lhs = rdx;
        rhs = rax;
      }

      // Test for string arguments before calling runtime.
      Label not_strings, both_strings, not_string1, string1, string1_smi2;

      // If this stub has already generated FP-specific code then the
      // arguments are already in rdx and rax.
      if (!ShouldGenerateFPCode() && !HasArgsInRegisters()) {
        GenerateLoadArguments(masm);
      }

      Condition is_smi;
      is_smi = masm->CheckSmi(lhs);
      __ j(is_smi, &not_string1);
      __ CmpObjectType(lhs, FIRST_NONSTRING_TYPE, r8);
      __ j(above_equal, &not_string1);

      // First argument is a string, test second.
      is_smi = masm->CheckSmi(rhs);
      __ j(is_smi, &string1_smi2);
      __ CmpObjectType(rhs, FIRST_NONSTRING_TYPE, r9);
      __ j(above_equal, &string1);

      // First and second argument are strings.
      StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
      __ TailCallStub(&string_add_stub);

      __ bind(&string1_smi2);
      // First argument is a string, second is a smi. Try to look up the
      // number string for the smi in the number string cache.
      NumberToStringStub::GenerateLookupNumberStringCache(
          masm, rhs, rbx, rcx, r8, true, &string1);

      // Replace second argument on stack and tailcall string add stub to make
      // the result.
      __ movq(Operand(rsp, 1 * kPointerSize), rbx);
      __ TailCallStub(&string_add_stub);

      // Only first argument is a string.
      __ bind(&string1);
      __ InvokeBuiltin(Builtins::STRING_ADD_LEFT, JUMP_FUNCTION);

      // First argument was not a string, test second.
      __ bind(&not_string1);
      is_smi = masm->CheckSmi(rhs);
      __ j(is_smi, &not_strings);
      __ CmpObjectType(rhs, FIRST_NONSTRING_TYPE, rhs);
      __ j(above_equal, &not_strings);

      // Only second argument is a string.
      __ InvokeBuiltin(Builtins::STRING_ADD_RIGHT, JUMP_FUNCTION);

      __ bind(&not_strings);
      // Neither argument is a string.
      __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
      break;
    }
    case Token::SUB:
      __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
      break;
    case Token::MUL:
      __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
      break;
    case Token::DIV:
      __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
      break;
    case Token::MOD:
      __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
      break;
    case Token::BIT_OR:
      __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
      break;
    case Token::BIT_AND:
      __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
      break;
    case Token::BIT_XOR:
      __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
      break;
    case Token::SAR:
      __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
      break;
    case Token::SHL:
      __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
      break;
    case Token::SHR:
      __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}


void GenericBinaryOpStub::GenerateLoadArguments(MacroAssembler* masm) {
  ASSERT(!HasArgsInRegisters());
  __ movq(rax, Operand(rsp, 1 * kPointerSize));
  __ movq(rdx, Operand(rsp, 2 * kPointerSize));
}


void GenericBinaryOpStub::GenerateReturn(MacroAssembler* masm) {
  // If arguments are not passed in registers remove them from the stack before
  // returning.
  if (!HasArgsInRegisters()) {
    __ ret(2 * kPointerSize);  // Remove both operands.
  } else {
    __ ret(0);
  }
}


void GenericBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
  ASSERT(HasArgsInRegisters());
  __ pop(rcx);
  if (HasArgsReversed()) {
    __ push(rax);
    __ push(rdx);
  } else {
    __ push(rdx);
    __ push(rax);
  }
  __ push(rcx);
}


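// Stack layout for the kBinaryOp_Patch tail call built up below (five
// arguments, assembled under the saved return address):
//   rsp[0]:  return address.
//   rsp[8]:  runtime operands type (smi).
//   rsp[16]: operation token (smi).
//   rsp[24]: this stub's MinorKey (smi).
//   rsp[32]: right operand.
//   rsp[40]: left operand.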
void GenericBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  Label get_result;

  // Ensure the operands are on the stack.
  if (HasArgsInRegisters()) {
    GenerateRegisterArgsPush(masm);
  }

  // Left and right arguments are already on stack.
  __ pop(rcx);  // Save the return address.

  // Push this stub's key.
  __ Push(Smi::FromInt(MinorKey()));

  // Although the operation and the type info are encoded into the key,
  // the encoding is opaque, so push them too.
  __ Push(Smi::FromInt(op_));
  __ Push(Smi::FromInt(runtime_operands_type_));

  __ push(rcx);  // The return address.

  // Perform patching to an appropriate fast case and return the result.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kBinaryOp_Patch), masm->isolate()),
      5,
      1);
}


Handle<Code> GetBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info) {
  GenericBinaryOpStub stub(key, type_info);
  return stub.GetCode();
}


Handle<Code> GetTypeRecordingBinaryOpStub(int key,
    TRBinaryOpIC::TypeInfo type_info,
    TRBinaryOpIC::TypeInfo result_type_info) {
  TypeRecordingBinaryOpStub stub(key, type_info, result_type_info);
  return stub.GetCode();
}


void TypeRecordingBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  __ pop(rcx);  // Save return address.
  __ push(rdx);
  __ push(rax);
  // Left and right arguments are now on top.
  // Push this stub's key. Although the operation and the type info are
  // encoded into the key, the encoding is opaque, so push them too.
  __ Push(Smi::FromInt(MinorKey()));
  __ Push(Smi::FromInt(op_));
  __ Push(Smi::FromInt(operands_type_));

  __ push(rcx);  // Push return address.

  // Patch the caller to an appropriate specialized stub and return the
  // operation result to the caller of the stub.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch),
                        masm->isolate()),
      5,
      1);
}


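// Dispatch on the recorded operand type. The type-recording stub starts in
// the UNINITIALIZED state and, via GenerateTypeTransition, is patched toward
// progressively more generic states (SMI, HEAP_NUMBER, ODDBALL, STRING, and
// finally GENERIC) as new operand types are observed at the call site.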
void TypeRecordingBinaryOpStub::Generate(MacroAssembler* masm) {
  switch (operands_type_) {
    case TRBinaryOpIC::UNINITIALIZED:
      GenerateTypeTransition(masm);
      break;
    case TRBinaryOpIC::SMI:
      GenerateSmiStub(masm);
      break;
    case TRBinaryOpIC::INT32:
      // The int32 case is identical to the Smi case. We avoid creating this
      // ic state on x64.
      UNREACHABLE();
      break;
    case TRBinaryOpIC::HEAP_NUMBER:
      GenerateHeapNumberStub(masm);
      break;
    case TRBinaryOpIC::ODDBALL:
      GenerateOddballStub(masm);
      break;
    case TRBinaryOpIC::STRING:
      GenerateStringStub(masm);
      break;
    case TRBinaryOpIC::GENERIC:
      GenerateGeneric(masm);
      break;
    default:
      UNREACHABLE();
  }
}


const char* TypeRecordingBinaryOpStub::GetName() {
  if (name_ != NULL) return name_;
  const int kMaxNameLength = 100;
  name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
      kMaxNameLength);
  if (name_ == NULL) return "OOM";
  const char* op_name = Token::Name(op_);
  const char* overwrite_name;
  switch (mode_) {
    case NO_OVERWRITE: overwrite_name = "Alloc"; break;
    case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
    case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
    default: overwrite_name = "UnknownOverwrite"; break;
  }

  OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
               "TypeRecordingBinaryOpStub_%s_%s_%s",
               op_name,
               overwrite_name,
               TRBinaryOpIC::GetName(operands_type_));
  return name_;
}


void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm,
    Label* slow,
    SmiCodeGenerateHeapNumberResults allow_heapnumber_results) {
  // We only generate heapnumber answers for overflowing calculations
  // for the four basic arithmetic operations.
  bool generate_inline_heapnumber_results =
      (allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS) &&
      (op_ == Token::ADD || op_ == Token::SUB ||
       op_ == Token::MUL || op_ == Token::DIV);

  // Arguments to TypeRecordingBinaryOpStub are in rdx and rax.
  Register left = rdx;
  Register right = rax;

  // Smi check of both operands. If op is BIT_OR, the check is delayed
  // until after the OR operation.
  Label not_smis;
  Label use_fp_on_smis;
  Label restore_MOD_registers;  // Only used if op_ == Token::MOD.

  if (op_ != Token::BIT_OR) {
    Comment smi_check_comment(masm, "-- Smi check arguments");
    __ JumpIfNotBothSmi(left, right, &not_smis);
  }

  // Perform the operation.
  Comment perform_smi(masm, "-- Perform smi operation");
  switch (op_) {
    case Token::ADD:
      ASSERT(right.is(rax));
      __ SmiAdd(right, right, left, &use_fp_on_smis);  // ADD is commutative.
      break;

    case Token::SUB:
      __ SmiSub(left, left, right, &use_fp_on_smis);
      __ movq(rax, left);
      break;

    case Token::MUL:
      ASSERT(right.is(rax));
      __ SmiMul(right, right, left, &use_fp_on_smis);  // MUL is commutative.
      break;

    case Token::DIV:
      // SmiDiv will not accept left in rdx or right in rax.
      left = rcx;
      right = rbx;
      __ movq(rbx, rax);
      __ movq(rcx, rdx);
      __ SmiDiv(rax, left, right, &use_fp_on_smis);
      break;

    case Token::MOD:
      // SmiMod will not accept left in rdx or right in rax.
      left = rcx;
      right = rbx;
      __ movq(rbx, rax);
      __ movq(rcx, rdx);
      __ SmiMod(rax, left, right, &use_fp_on_smis);
      break;

    case Token::BIT_OR: {
      ASSERT(right.is(rax));
      __ movq(rcx, right);  // Save the right operand.
      __ SmiOr(right, right, left);  // BIT_OR is commutative.
      __ JumpIfNotSmi(right, &not_smis);  // Test delayed until after BIT_OR.
      break;
    }
    case Token::BIT_XOR:
      ASSERT(right.is(rax));
      __ SmiXor(right, right, left);  // BIT_XOR is commutative.
      break;

    case Token::BIT_AND:
      ASSERT(right.is(rax));
      __ SmiAnd(right, right, left);  // BIT_AND is commutative.
      break;

    case Token::SHL:
      __ SmiShiftLeft(left, left, right);
      __ movq(rax, left);
      break;

    case Token::SAR:
      __ SmiShiftArithmeticRight(left, left, right);
      __ movq(rax, left);
      break;

    case Token::SHR:
      __ SmiShiftLogicalRight(left, left, right, &not_smis);
      __ movq(rax, left);
      break;

    default:
      UNREACHABLE();
  }

  // Emit return of result in rax. Some operations have registers pushed.
  __ ret(0);

  // For some operations emit inline code to perform floating point
  // operations on known smis (e.g., if the result of the operation
  // overflowed the smi range).
  __ bind(&use_fp_on_smis);
  if (op_ == Token::DIV || op_ == Token::MOD) {
    // Restore left and right to rdx and rax.
    __ movq(rdx, rcx);
    __ movq(rax, rbx);
  }

  if (generate_inline_heapnumber_results) {
    __ AllocateHeapNumber(rcx, rbx, slow);
    Comment perform_float(masm, "-- Perform float operation on smis");
    FloatingPointHelper::LoadSSE2SmiOperands(masm);
    switch (op_) {
      case Token::ADD: __ addsd(xmm0, xmm1); break;
      case Token::SUB: __ subsd(xmm0, xmm1); break;
      case Token::MUL: __ mulsd(xmm0, xmm1); break;
      case Token::DIV: __ divsd(xmm0, xmm1); break;
      default: UNREACHABLE();
    }
    __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0);
    __ movq(rax, rcx);
    __ ret(0);
  }

  // Non-smi operands reach the end of the code generated by
  // GenerateSmiCode, and fall through to subsequent code,
  // with the operands in rdx and rax.
  Comment done_comment(masm, "-- Enter non-smi code");
  __ bind(&not_smis);
  if (op_ == Token::BIT_OR) {
    __ movq(right, rcx);
  }
}


void TypeRecordingBinaryOpStub::GenerateFloatingPointCode(
    MacroAssembler* masm,
    Label* allocation_failure,
    Label* non_numeric_failure) {
  switch (op_) {
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV: {
      FloatingPointHelper::LoadSSE2UnknownOperands(masm, non_numeric_failure);

      switch (op_) {
        case Token::ADD: __ addsd(xmm0, xmm1); break;
        case Token::SUB: __ subsd(xmm0, xmm1); break;
        case Token::MUL: __ mulsd(xmm0, xmm1); break;
        case Token::DIV: __ divsd(xmm0, xmm1); break;
        default: UNREACHABLE();
      }
      GenerateHeapResultAllocation(masm, allocation_failure);
      __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0);
      __ ret(0);
      break;
    }
    case Token::MOD: {
      // For MOD we jump to the allocation_failure label, to call runtime.
      __ jmp(allocation_failure);
      break;
    }
    case Token::BIT_OR:
    case Token::BIT_AND:
    case Token::BIT_XOR:
    case Token::SAR:
    case Token::SHL:
    case Token::SHR: {
      Label non_smi_shr_result;
      Register heap_number_map = r9;
      __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
      FloatingPointHelper::LoadAsIntegers(masm, non_numeric_failure,
                                          heap_number_map);
      switch (op_) {
        case Token::BIT_OR:  __ orl(rax, rcx); break;
        case Token::BIT_AND: __ andl(rax, rcx); break;
        case Token::BIT_XOR: __ xorl(rax, rcx); break;
        case Token::SAR: __ sarl_cl(rax); break;
        case Token::SHL: __ shll_cl(rax); break;
        case Token::SHR: {
          __ shrl_cl(rax);
          // Check if result is negative. This can only happen for a shift
          // by zero.
          __ testl(rax, rax);
          __ j(negative, &non_smi_shr_result);
          break;
        }
        default: UNREACHABLE();
      }
      STATIC_ASSERT(kSmiValueSize == 32);
      // Tag smi result and return.
      __ Integer32ToSmi(rax, rax);
      __ Ret();

      // Logical shift right can produce an unsigned int32 that is not
      // an int32, and so is not in the smi range. Allocate a heap number
      // in that case.
      if (op_ == Token::SHR) {
        __ bind(&non_smi_shr_result);
        Label allocation_failed;
        __ movl(rbx, rax);  // rbx holds result value (uint32 value as int64).
        // Allocate heap number in new space.
        // Not using AllocateHeapNumber macro in order to reuse
        // already loaded heap_number_map.
        __ AllocateInNewSpace(HeapNumber::kSize,
                              rax,
                              rcx,
                              no_reg,
                              &allocation_failed,
                              TAG_OBJECT);
        // Set the map.
        if (FLAG_debug_code) {
          __ AbortIfNotRootValue(heap_number_map,
                                 Heap::kHeapNumberMapRootIndex,
                                 "HeapNumberMap register clobbered.");
        }
        __ movq(FieldOperand(rax, HeapObject::kMapOffset),
                heap_number_map);
        __ cvtqsi2sd(xmm0, rbx);
        __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0);
        __ Ret();

        __ bind(&allocation_failed);
        // We need tagged values in rdx and rax for the following code,
        // not int32 in rax and rcx. The left value is in rbx (a shift by
        // zero is the only way to get here) and the right value in rcx.
        __ Integer32ToSmi(rax, rcx);
        __ Integer32ToSmi(rdx, rbx);
        __ jmp(allocation_failure);
      }
      break;
    }
    default: UNREACHABLE(); break;
  }
  // No fall-through from this generated code.
  if (FLAG_debug_code) {
    __ Abort("Unexpected fall-through in "
             "TypeRecordingBinaryStub::GenerateFloatingPointCode.");
  }
}


void TypeRecordingBinaryOpStub::GenerateStringAddCode(MacroAssembler* masm) {
  ASSERT(op_ == Token::ADD);
  NearLabel left_not_string, call_runtime;

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;

  // Test if left operand is a string.
  __ JumpIfSmi(left, &left_not_string);
  __ CmpObjectType(left, FIRST_NONSTRING_TYPE, rcx);
  __ j(above_equal, &left_not_string);
  StringAddStub string_add_left_stub(NO_STRING_CHECK_LEFT_IN_STUB);
  GenerateRegisterArgsPush(masm);
  __ TailCallStub(&string_add_left_stub);

  // Left operand is not a string, test right.
  __ bind(&left_not_string);
  __ JumpIfSmi(right, &call_runtime);
  __ CmpObjectType(right, FIRST_NONSTRING_TYPE, rcx);
  __ j(above_equal, &call_runtime);

  StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB);
  GenerateRegisterArgsPush(masm);
  __ TailCallStub(&string_add_right_stub);

  // Neither argument is a string.
  __ bind(&call_runtime);
}


void TypeRecordingBinaryOpStub::GenerateCallRuntimeCode(MacroAssembler* masm) {
  GenerateRegisterArgsPush(masm);
  switch (op_) {
    case Token::ADD:
      __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
      break;
    case Token::SUB:
      __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
      break;
    case Token::MUL:
      __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
      break;
    case Token::DIV:
      __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
      break;
    case Token::MOD:
      __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
      break;
    case Token::BIT_OR:
      __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
      break;
    case Token::BIT_AND:
      __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
      break;
    case Token::BIT_XOR:
      __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
      break;
    case Token::SAR:
      __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
      break;
    case Token::SHL:
      __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
      break;
    case Token::SHR:
      __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}


void TypeRecordingBinaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
  Label not_smi;

  GenerateSmiCode(masm, &not_smi, NO_HEAPNUMBER_RESULTS);

  __ bind(&not_smi);
  GenerateTypeTransition(masm);
}


void TypeRecordingBinaryOpStub::GenerateStringStub(MacroAssembler* masm) {
  ASSERT(operands_type_ == TRBinaryOpIC::STRING);
  ASSERT(op_ == Token::ADD);
  // Try to add the arguments as strings; otherwise, transition to the
  // generic TRBinaryOpIC type.
  GenerateStringAddCode(masm);
  GenerateTypeTransition(masm);
}


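// Oddball operands (undefined, in the code below) are converted in place:
// undefined becomes 0 for the bitwise operations (matching ToInt32) and NaN
// for the arithmetic ones (matching ToNumber), after which the heap-number
// code handles the rest.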
void TypeRecordingBinaryOpStub::GenerateOddballStub(MacroAssembler* masm) {
  Label call_runtime;

  if (op_ == Token::ADD) {
    // Handle string addition here, because it is the only operation
    // that does not do a ToNumber conversion on the operands.
    GenerateStringAddCode(masm);
  }

  // Convert oddball arguments to numbers.
  NearLabel check, done;
  __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, &check);
  if (Token::IsBitOp(op_)) {
    __ xor_(rdx, rdx);
  } else {
    __ LoadRoot(rdx, Heap::kNanValueRootIndex);
  }
  __ jmp(&done);
  __ bind(&check);
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, &done);
  if (Token::IsBitOp(op_)) {
    __ xor_(rax, rax);
  } else {
    __ LoadRoot(rax, Heap::kNanValueRootIndex);
  }
  __ bind(&done);

  GenerateHeapNumberStub(masm);
}


void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
  Label gc_required, not_number;
  GenerateFloatingPointCode(masm, &gc_required, &not_number);

  __ bind(&not_number);
  GenerateTypeTransition(masm);

  __ bind(&gc_required);
  GenerateCallRuntimeCode(masm);
}


void TypeRecordingBinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
  Label call_runtime, call_string_add_or_runtime;

  GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);

  GenerateFloatingPointCode(masm, &call_runtime, &call_string_add_or_runtime);

  __ bind(&call_string_add_or_runtime);
  if (op_ == Token::ADD) {
    GenerateStringAddCode(masm);
  }

  __ bind(&call_runtime);
  GenerateCallRuntimeCode(masm);
}


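// Allocates (or reuses) the heap number that receives the result in rax.
// Under OVERWRITE_LEFT / OVERWRITE_RIGHT the corresponding operand is reused
// as the result holder when it is already a heap object; a fresh heap number
// is allocated only when that operand is a smi, or under NO_OVERWRITE.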
void TypeRecordingBinaryOpStub::GenerateHeapResultAllocation(
    MacroAssembler* masm,
    Label* alloc_failure) {
  Label skip_allocation;
  OverwriteMode mode = mode_;
  switch (mode) {
    case OVERWRITE_LEFT: {
      // If the argument in rdx is already an object, we skip the
      // allocation of a heap number.
      __ JumpIfNotSmi(rdx, &skip_allocation);
      // Allocate a heap number for the result. Keep rax and rdx intact
      // for the possible runtime call.
      __ AllocateHeapNumber(rbx, rcx, alloc_failure);
      // Now rdx can be overwritten losing one of the arguments as we are
      // now done and will not need it any more.
      __ movq(rdx, rbx);
      __ bind(&skip_allocation);
      // Use object in rdx as a result holder.
      __ movq(rax, rdx);
      break;
    }
    case OVERWRITE_RIGHT:
      // If the argument in rax is already an object, we skip the
      // allocation of a heap number.
      __ JumpIfNotSmi(rax, &skip_allocation);
      // Fall through!
    case NO_OVERWRITE:
      // Allocate a heap number for the result. Keep rax and rdx intact
      // for the possible runtime call.
      __ AllocateHeapNumber(rbx, rcx, alloc_failure);
      // Now rax can be overwritten losing one of the arguments as we are
      // now done and will not need it any more.
      __ movq(rax, rbx);
      __ bind(&skip_allocation);
      break;
    default: UNREACHABLE();
  }
}


void TypeRecordingBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
  __ pop(rcx);
  __ push(rdx);
  __ push(rax);
  __ push(rcx);
}


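// The transcendental cache maps the 64 bits of a double input to a
// previously computed result. Each cache element is two uint32's (the input
// bits) followed by a pointer to the cached heap number, 16 bytes in total,
// as the DEBUG block inside the stub verifies.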
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001554void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001555 // TAGGED case:
1556 // Input:
1557 // rsp[8]: argument (should be number).
1558 // rsp[0]: return address.
1559 // Output:
1560 // rax: tagged double result.
1561 // UNTAGGED case:
1562 // Input::
1563 // rsp[0]: return address.
1564 // xmm1: untagged double input argument
1565 // Output:
1566 // xmm1: untagged double result.
1567
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001568 Label runtime_call;
1569 Label runtime_call_clear_stack;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001570 Label skip_cache;
1571 const bool tagged = (argument_type_ == TAGGED);
1572 if (tagged) {
1573 NearLabel input_not_smi;
1574 NearLabel loaded;
1575 // Test that rax is a number.
1576 __ movq(rax, Operand(rsp, kPointerSize));
1577 __ JumpIfNotSmi(rax, &input_not_smi);
1578 // Input is a smi. Untag and load it onto the FPU stack.
1579 // Then load the bits of the double into rbx.
1580 __ SmiToInteger32(rax, rax);
1581 __ subq(rsp, Immediate(kDoubleSize));
1582 __ cvtlsi2sd(xmm1, rax);
1583 __ movsd(Operand(rsp, 0), xmm1);
1584 __ movq(rbx, xmm1);
1585 __ movq(rdx, xmm1);
1586 __ fld_d(Operand(rsp, 0));
1587 __ addq(rsp, Immediate(kDoubleSize));
1588 __ jmp(&loaded);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001589
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001590 __ bind(&input_not_smi);
1591 // Check if input is a HeapNumber.
1592 __ LoadRoot(rbx, Heap::kHeapNumberMapRootIndex);
1593 __ cmpq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
1594 __ j(not_equal, &runtime_call);
1595 // Input is a HeapNumber. Push it on the FPU stack and load its
1596 // bits into rbx.
1597 __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset));
1598 __ movq(rbx, FieldOperand(rax, HeapNumber::kValueOffset));
1599 __ movq(rdx, rbx);
1600
1601 __ bind(&loaded);
1602 } else { // UNTAGGED.
1603 __ movq(rbx, xmm1);
1604 __ movq(rdx, xmm1);
1605 }
1606
1607 // ST[0] == double value, if TAGGED.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001608 // rbx = bits of double value.
1609 // rdx = also bits of double value.
1610 // Compute hash (h is 32 bits, bits are 64 and the shifts are arithmetic):
1611 // h = h0 = bits ^ (bits >> 32);
1612 // h ^= h >> 16;
1613 // h ^= h >> 8;
1614 // h = h & (cacheSize - 1);
1615 // or h = (h0 ^ (h0 >> 8) ^ (h0 >> 16) ^ (h0 >> 24)) & (cacheSize - 1)
1616 __ sar(rdx, Immediate(32));
1617 __ xorl(rdx, rbx);
1618 __ movl(rcx, rdx);
1619 __ movl(rax, rdx);
1620 __ movl(rdi, rdx);
1621 __ sarl(rdx, Immediate(8));
1622 __ sarl(rcx, Immediate(16));
1623 __ sarl(rax, Immediate(24));
1624 __ xorl(rcx, rdx);
1625 __ xorl(rax, rdi);
1626 __ xorl(rcx, rax);
Steve Block44f0eee2011-05-26 01:26:41 +01001627 ASSERT(IsPowerOf2(TranscendentalCache::SubCache::kCacheSize));
1628 __ andl(rcx, Immediate(TranscendentalCache::SubCache::kCacheSize - 1));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001629
1630 // ST[0] == double value.
1631 // rbx = bits of double value.
1632 // rcx = TranscendentalCache::hash(double value).
Steve Block44f0eee2011-05-26 01:26:41 +01001633 ExternalReference cache_array =
1634 ExternalReference::transcendental_cache_array_address(masm->isolate());
1635 __ movq(rax, cache_array);
1636 int cache_array_index =
1637 type_ * sizeof(Isolate::Current()->transcendental_cache()->caches_[0]);
1638 __ movq(rax, Operand(rax, cache_array_index));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001639 // rax points to the cache for the type type_.
1640 // If NULL, the cache hasn't been initialized yet, so go through runtime.
1641 __ testq(rax, rax);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001642 __ j(zero, &runtime_call_clear_stack); // Only clears stack if TAGGED.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001643#ifdef DEBUG
1644 // Check that the layout of cache elements matches expectations.
1645 { // NOLINT - doesn't like a single brace on a line.
Steve Block44f0eee2011-05-26 01:26:41 +01001646 TranscendentalCache::SubCache::Element test_elem[2];
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001647 char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
1648 char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
1649 char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
1650 char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
1651 char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
1652 // Two uint32_t's and a pointer per element.
1653 CHECK_EQ(16, static_cast<int>(elem2_start - elem_start));
1654 CHECK_EQ(0, static_cast<int>(elem_in0 - elem_start));
1655 CHECK_EQ(kIntSize, static_cast<int>(elem_in1 - elem_start));
1656 CHECK_EQ(2 * kIntSize, static_cast<int>(elem_out - elem_start));
1657 }
1658#endif
1659 // Find the address of the rcx'th entry in the cache, i.e., &rax[rcx*16].
1660 __ addl(rcx, rcx);
1661 __ lea(rcx, Operand(rax, rcx, times_8, 0));
1662 // Check if cache matches: Double value is stored in uint32_t[2] array.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001663 NearLabel cache_miss;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001664 __ cmpq(rbx, Operand(rcx, 0));
1665 __ j(not_equal, &cache_miss);
1666 // Cache hit!
1667 __ movq(rax, Operand(rcx, 2 * kIntSize));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001668 if (tagged) {
1669 __ fstp(0); // Clear FPU stack.
1670 __ ret(kPointerSize);
1671 } else { // UNTAGGED.
1672 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
1673 __ Ret();
1674 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001675
1676 __ bind(&cache_miss);
1677 // Update cache with new value.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001678 if (tagged) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001679 __ AllocateHeapNumber(rax, rdi, &runtime_call_clear_stack);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001680 } else { // UNTAGGED.
1681 __ AllocateHeapNumber(rax, rdi, &skip_cache);
1682 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1);
1683 __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset));
1684 }
1685 GenerateOperation(masm);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001686 __ movq(Operand(rcx, 0), rbx);
1687 __ movq(Operand(rcx, 2 * kIntSize), rax);
1688 __ fstp_d(FieldOperand(rax, HeapNumber::kValueOffset));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001689 if (tagged) {
1690 __ ret(kPointerSize);
1691 } else { // UNTAGGED.
1692 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
1693 __ Ret();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001694
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001695 // Skip cache and return answer directly, only in untagged case.
1696 __ bind(&skip_cache);
1697 __ subq(rsp, Immediate(kDoubleSize));
1698 __ movsd(Operand(rsp, 0), xmm1);
1699 __ fld_d(Operand(rsp, 0));
1700 GenerateOperation(masm);
1701 __ fstp_d(Operand(rsp, 0));
1702 __ movsd(xmm1, Operand(rsp, 0));
1703 __ addq(rsp, Immediate(kDoubleSize));
1704 // We return the value in xmm1 without adding it to the cache, but
1705 // we cause a scavenging GC so that future allocations will succeed.
1706 __ EnterInternalFrame();
1707 // Allocate an unused object bigger than a HeapNumber.
1708 __ Push(Smi::FromInt(2 * kDoubleSize));
1709 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
1710 __ LeaveInternalFrame();
1711 __ Ret();
1712 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001713
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001714 // Call runtime, doing whatever allocation and cleanup is necessary.
1715 if (tagged) {
1716 __ bind(&runtime_call_clear_stack);
1717 __ fstp(0);
1718 __ bind(&runtime_call);
Steve Block44f0eee2011-05-26 01:26:41 +01001719 __ TailCallExternalReference(
1720 ExternalReference(RuntimeFunction(), masm->isolate()), 1, 1);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001721 } else { // UNTAGGED.
1722 __ bind(&runtime_call_clear_stack);
1723 __ bind(&runtime_call);
1724 __ AllocateHeapNumber(rax, rdi, &skip_cache);
1725 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1);
1726 __ EnterInternalFrame();
1727 __ push(rax);
1728 __ CallRuntime(RuntimeFunction(), 1);
1729 __ LeaveInternalFrame();
1730 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
1731 __ Ret();
1732 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001733}
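
// A rough standalone C++ sketch of the cache hash computed above. This is
// illustrative only: kCacheSizeSketch stands in for
// TranscendentalCache::SubCache::kCacheSize, which the ASSERT above
// guarantees is a power of two.
#include <stdint.h>
static const uint32_t kCacheSizeSketch = 512;  // assumed illustrative size
static uint32_t TranscendentalHashSketch(uint64_t bits) {
  uint32_t h = static_cast<uint32_t>(bits) ^ static_cast<uint32_t>(bits >> 32);
  h ^= h >> 16;  // h == h0 ^ (h0 >> 16)
  h ^= h >> 8;   // h == h0 ^ (h0 >> 8) ^ (h0 >> 16) ^ (h0 >> 24)
  return h & (kCacheSizeSketch - 1);  // mask down to a cache index
}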
1734
1735
1736Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
1737 switch (type_) {
1738 // Add more cases when necessary.
1739 case TranscendentalCache::SIN: return Runtime::kMath_sin;
1740 case TranscendentalCache::COS: return Runtime::kMath_cos;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001741 case TranscendentalCache::LOG: return Runtime::kMath_log;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001742 default:
1743 UNIMPLEMENTED();
1744 return Runtime::kAbort;
1745 }
1746}
1747
1748
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001749void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001750 // Registers:
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001751 // rax: Newly allocated HeapNumber, which must be preserved.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001752 // rbx: Bits of input double. Must be preserved.
1753 // rcx: Pointer to cache entry. Must be preserved.
1754 // st(0): Input double
1755 Label done;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001756 if (type_ == TranscendentalCache::SIN || type_ == TranscendentalCache::COS) {
1757 // Both fsin and fcos require arguments in the range +/-2^63 and
1758 // return NaN for infinities and NaN. They can share all code except
1759 // the actual fsin/fcos operation.
1760 Label in_range;
1761 // If the argument is outside the range -2^63..2^63, fsin/fcos don't
1762 // work. We must reduce it to the appropriate range.
1763 __ movq(rdi, rbx);
1764 // Move exponent and sign bits to low bits.
1765 __ shr(rdi, Immediate(HeapNumber::kMantissaBits));
1766 // Remove sign bit.
1767 __ andl(rdi, Immediate((1 << HeapNumber::kExponentBits) - 1));
1768 int supported_exponent_limit = (63 + HeapNumber::kExponentBias);
1769 __ cmpl(rdi, Immediate(supported_exponent_limit));
1770 __ j(below, &in_range);
1771 // Check for infinity and NaN. Both return NaN for sin.
1772 __ cmpl(rdi, Immediate(0x7ff));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001773 NearLabel non_nan_result;
1774 __ j(not_equal, &non_nan_result);
1775 // Input is +/-Infinity or NaN. Result is NaN.
1776 __ fstp(0);
1777 __ LoadRoot(kScratchRegister, Heap::kNanValueRootIndex);
1778 __ fld_d(FieldOperand(kScratchRegister, HeapNumber::kValueOffset));
1779 __ jmp(&done);
1780
1781 __ bind(&non_nan_result);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001782
Ben Murdochb0fe1622011-05-05 13:52:32 +01001783 // Use fpmod to restrict argument to the range +/-2*PI.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001784 __ movq(rdi, rax); // Save rax before using fnstsw_ax.
Ben Murdochb0fe1622011-05-05 13:52:32 +01001785 __ fldpi();
1786 __ fadd(0);
1787 __ fld(1);
1788 // FPU Stack: input, 2*pi, input.
1789 {
1790 Label no_exceptions;
1791 __ fwait();
1792 __ fnstsw_ax();
1793 // Clear if Illegal Operand or Zero Division exceptions are set.
1794 __ testl(rax, Immediate(5)); // #IO and #ZD flags of FPU status word.
1795 __ j(zero, &no_exceptions);
1796 __ fnclex();
1797 __ bind(&no_exceptions);
1798 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001799
Ben Murdochb0fe1622011-05-05 13:52:32 +01001800 // Compute st(0) % st(1)
1801 {
1802 NearLabel partial_remainder_loop;
1803 __ bind(&partial_remainder_loop);
1804 __ fprem1();
1805 __ fwait();
1806 __ fnstsw_ax();
1807 __ testl(rax, Immediate(0x400)); // Check C2 bit of FPU status word.
1808 // If C2 is set, computation only has partial result. Loop to
1809 // continue computation.
1810 __ j(not_zero, &partial_remainder_loop);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001811 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001812 // FPU Stack: input, 2*pi, input % 2*pi
1813 __ fstp(2);
1814 // FPU Stack: input % 2*pi, 2*pi,
1815 __ fstp(0);
1816 // FPU Stack: input % 2*pi
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001817 __ movq(rax, rdi); // Restore rax, pointer to the new HeapNumber.
Ben Murdochb0fe1622011-05-05 13:52:32 +01001818 __ bind(&in_range);
1819 switch (type_) {
1820 case TranscendentalCache::SIN:
1821 __ fsin();
1822 break;
1823 case TranscendentalCache::COS:
1824 __ fcos();
1825 break;
1826 default:
1827 UNREACHABLE();
1828 }
1829 __ bind(&done);
1830 } else {
1831 ASSERT(type_ == TranscendentalCache::LOG);
1832 __ fldln2();
1833 __ fxch();
1834 __ fyl2x();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001835 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001836}
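
// A rough standalone sketch of the SIN/COS path above (illustrative, not V8
// code): non-finite inputs produce NaN, arguments at or beyond 2^63 are
// reduced modulo 2*pi (std::remainder stands in for the fprem1 loop; both
// compute a round-to-nearest remainder), then the operation itself runs.
#include <cmath>
#include <limits>
static double SinCosSketch(double input, bool is_sin) {
  if (std::isnan(input) || std::isinf(input)) {
    return std::numeric_limits<double>::quiet_NaN();  // as the NaN root load
  }
  if (std::fabs(input) >= 9223372036854775808.0) {  // 2^63: fsin/fcos limit
    input = std::remainder(input, 2.0 * 3.14159265358979323846);
  }
  return is_sin ? std::sin(input) : std::cos(input);
}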
1837
1838
1839// Get the integer part of a heap number.
1840// Overwrites the contents of rdi, rbx and rcx. Result cannot be rdi or rbx.
1841void IntegerConvert(MacroAssembler* masm,
1842 Register result,
1843 Register source) {
1844 // Result may be rcx. If result and source are the same register, source will
1845 // be overwritten.
1846 ASSERT(!result.is(rdi) && !result.is(rbx));
1847 // TODO(lrn): When type info reaches here, if value is a 32-bit integer, use
1848 // cvttsd2si (32-bit version) directly.
1849 Register double_exponent = rbx;
1850 Register double_value = rdi;
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001851 NearLabel done, exponent_63_plus;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001852 // Get double and extract exponent.
1853 __ movq(double_value, FieldOperand(source, HeapNumber::kValueOffset));
1854 // Clear result preemptively, in case we need to return zero.
1855 __ xorl(result, result);
1856 __ movq(xmm0, double_value); // Save copy in xmm0 in case we need it there.
1857 // Double to remove sign bit, shift exponent down to least significant bits.
1858 // and subtract bias to get the unshifted, unbiased exponent.
1859 __ lea(double_exponent, Operand(double_value, double_value, times_1, 0));
1860 __ shr(double_exponent, Immediate(64 - HeapNumber::kExponentBits));
1861 __ subl(double_exponent, Immediate(HeapNumber::kExponentBias));
1862 // Check whether the exponent is too big for a 63 bit unsigned integer.
1863 __ cmpl(double_exponent, Immediate(63));
1864 __ j(above_equal, &exponent_63_plus);
1865 // Handle exponent range 0..62.
1866 __ cvttsd2siq(result, xmm0);
1867 __ jmp(&done);
1868
1869 __ bind(&exponent_63_plus);
1870 // Exponent negative or 63+.
1871 __ cmpl(double_exponent, Immediate(83));
1872 // If exponent negative or above 83, number contains no significant bits in
1873 // the range 0..2^31, so result is zero; the result register already holds zero.
1874 __ j(above, &done);
1875
1876 // Exponent in range 63..83.
1877 // Mantissa * 2^exponent contains bits in the range 2^0..2^31, namely
1878 // the least significant exponent-52 bits.
1879
1880 // Negate low bits of mantissa if value is negative.
1881 __ addq(double_value, double_value); // Move sign bit to carry.
1882 __ sbbl(result, result); // And convert carry to -1 in result register.
1883 // If the value is negative, do (bits-1)^-1 (i.e. negate), otherwise (bits-0)^0.
1884 __ addl(double_value, result);
1885 // Do xor in opposite directions depending on where we want the result
1886 // (depending on whether result is rcx or not).
1887
1888 if (result.is(rcx)) {
1889 __ xorl(double_value, result);
1890 // Left shift mantissa by (exponent - mantissabits - 1) to save the
1891 // bits that have positional values below 2^32 (the extra -1 comes from the
1892 // doubling done above to move the sign bit into the carry flag).
1893 __ leal(rcx, Operand(double_exponent, -HeapNumber::kMantissaBits - 1));
1894 __ shll_cl(double_value);
1895 __ movl(result, double_value);
1896 } else {
1897 // As the then-branch, but move double-value to result before shifting.
1898 __ xorl(result, double_value);
1899 __ leal(rcx, Operand(double_exponent, -HeapNumber::kMantissaBits - 1));
1900 __ shll_cl(result);
1901 }
1902
1903 __ bind(&done);
1904}
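
// A rough standalone C++ rendering of IntegerConvert above (illustrative,
// not V8 code). It assumes the IEEE-754 layout the stub relies on: 11
// exponent bits, bias 1023, 52 mantissa bits. The unsigned comparisons
// mirror the stub's j(above_equal)/j(above) branches, so negative exponents
// wrap around and fall through to the zero result.
#include <stdint.h>
#include <string.h>
static int32_t IntegerConvertSketch(double value) {
  uint64_t bits;
  memcpy(&bits, &value, sizeof(bits));
  // Shift out the sign bit, move the exponent field down, remove the bias.
  uint32_t exponent = static_cast<uint32_t>((bits << 1) >> 53) - 1023;
  if (exponent < 63) {
    return static_cast<int32_t>(static_cast<int64_t>(value));  // cvttsd2siq
  }
  if (exponent > 83) return 0;  // no significant bits in the range 0..2^31
  // Exponent 63..83: negate the low mantissa word if the sign bit was set
  // (the addq/sbbl/addl/xorl sequence), then shift the bits below 2^32 into
  // place; the implicit leading 1 sits at bit 63 or above, so it drops out.
  uint32_t sign = static_cast<uint32_t>(-static_cast<int64_t>(bits >> 63));
  uint32_t low = (static_cast<uint32_t>(bits << 1) + sign) ^ sign;
  return static_cast<int32_t>(low << (exponent - 53));
}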
1905
1906
1907// Input: rdx, rax are the left and right objects of a bit op.
1908// Output: rax, rcx are left and right integers for a bit op.
1909void FloatingPointHelper::LoadNumbersAsIntegers(MacroAssembler* masm) {
1910 // Check float operands.
1911 Label done;
1912 Label rax_is_smi;
1913 Label rax_is_object;
1914 Label rdx_is_object;
1915
1916 __ JumpIfNotSmi(rdx, &rdx_is_object);
1917 __ SmiToInteger32(rdx, rdx);
1918 __ JumpIfSmi(rax, &rax_is_smi);
1919
1920 __ bind(&rax_is_object);
1921 IntegerConvert(masm, rcx, rax); // Uses rdi, rcx and rbx.
1922 __ jmp(&done);
1923
1924 __ bind(&rdx_is_object);
1925 IntegerConvert(masm, rdx, rdx); // Uses rdi, rcx and rbx.
1926 __ JumpIfNotSmi(rax, &rax_is_object);
1927 __ bind(&rax_is_smi);
1928 __ SmiToInteger32(rcx, rax);
1929
1930 __ bind(&done);
1931 __ movl(rax, rdx);
1932}
1933
1934
1935// Input: rdx, rax are the left and right objects of a bit op.
1936// Output: rax, rcx are left and right integers for a bit op.
Steve Block1e0659c2011-05-24 12:43:12 +01001937// Jump to conversion_failure: rdx and rax are unchanged.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001938void FloatingPointHelper::LoadAsIntegers(MacroAssembler* masm,
1939 Label* conversion_failure,
1940 Register heap_number_map) {
1941 // Check float operands.
1942 Label arg1_is_object, check_undefined_arg1;
1943 Label arg2_is_object, check_undefined_arg2;
1944 Label load_arg2, done;
1945
1946 __ JumpIfNotSmi(rdx, &arg1_is_object);
Steve Block1e0659c2011-05-24 12:43:12 +01001947 __ SmiToInteger32(r8, rdx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001948 __ jmp(&load_arg2);
1949
1950 // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
1951 __ bind(&check_undefined_arg1);
1952 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
1953 __ j(not_equal, conversion_failure);
Steve Block1e0659c2011-05-24 12:43:12 +01001954 __ movl(r8, Immediate(0));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001955 __ jmp(&load_arg2);
1956
1957 __ bind(&arg1_is_object);
1958 __ cmpq(FieldOperand(rdx, HeapObject::kMapOffset), heap_number_map);
1959 __ j(not_equal, &check_undefined_arg1);
Steve Block1e0659c2011-05-24 12:43:12 +01001960 // Get the untagged integer version of the rdx heap number in r8.
1961 IntegerConvert(masm, r8, rdx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001962
Steve Block1e0659c2011-05-24 12:43:12 +01001963 // Here r8 has the untagged integer, rax has a Smi or a heap number.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001964 __ bind(&load_arg2);
1965 // Test if arg2 is a Smi.
1966 __ JumpIfNotSmi(rax, &arg2_is_object);
Steve Block1e0659c2011-05-24 12:43:12 +01001967 __ SmiToInteger32(rcx, rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001968 __ jmp(&done);
1969
1970 // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
1971 __ bind(&check_undefined_arg2);
1972 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
1973 __ j(not_equal, conversion_failure);
1974 __ movl(rcx, Immediate(0));
1975 __ jmp(&done);
1976
1977 __ bind(&arg2_is_object);
1978 __ cmpq(FieldOperand(rax, HeapObject::kMapOffset), heap_number_map);
1979 __ j(not_equal, &check_undefined_arg2);
1980 // Get the untagged integer version of the rax heap number in rcx.
1981 IntegerConvert(masm, rcx, rax);
1982 __ bind(&done);
Steve Block1e0659c2011-05-24 12:43:12 +01001983 __ movl(rax, r8);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001984}
1985
1986
1987void FloatingPointHelper::LoadSSE2SmiOperands(MacroAssembler* masm) {
1988 __ SmiToInteger32(kScratchRegister, rdx);
1989 __ cvtlsi2sd(xmm0, kScratchRegister);
1990 __ SmiToInteger32(kScratchRegister, rax);
1991 __ cvtlsi2sd(xmm1, kScratchRegister);
1992}
1993
1994
1995void FloatingPointHelper::LoadSSE2NumberOperands(MacroAssembler* masm) {
1996 Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, done;
1997 // Load operand in rdx into xmm0.
1998 __ JumpIfSmi(rdx, &load_smi_rdx);
1999 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
2000 // Load operand in rax into xmm1.
2001 __ JumpIfSmi(rax, &load_smi_rax);
2002 __ bind(&load_nonsmi_rax);
2003 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
2004 __ jmp(&done);
2005
2006 __ bind(&load_smi_rdx);
2007 __ SmiToInteger32(kScratchRegister, rdx);
2008 __ cvtlsi2sd(xmm0, kScratchRegister);
2009 __ JumpIfNotSmi(rax, &load_nonsmi_rax);
2010
2011 __ bind(&load_smi_rax);
2012 __ SmiToInteger32(kScratchRegister, rax);
2013 __ cvtlsi2sd(xmm1, kScratchRegister);
2014
2015 __ bind(&done);
2016}
2017
2018
2019void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm,
2020 Label* not_numbers) {
2021 Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done;
2022 // Load operand in rdx into xmm0, or branch to not_numbers.
2023 __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex);
2024 __ JumpIfSmi(rdx, &load_smi_rdx);
2025 __ cmpq(FieldOperand(rdx, HeapObject::kMapOffset), rcx);
2026 __ j(not_equal, not_numbers); // Argument in rdx is not a number.
2027 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
2028 // Load operand in rax into xmm1, or branch to not_numbers.
2029 __ JumpIfSmi(rax, &load_smi_rax);
2030
2031 __ bind(&load_nonsmi_rax);
2032 __ cmpq(FieldOperand(rax, HeapObject::kMapOffset), rcx);
2033 __ j(not_equal, not_numbers);
2034 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
2035 __ jmp(&done);
2036
2037 __ bind(&load_smi_rdx);
2038 __ SmiToInteger32(kScratchRegister, rdx);
2039 __ cvtlsi2sd(xmm0, kScratchRegister);
2040 __ JumpIfNotSmi(rax, &load_nonsmi_rax);
2041
2042 __ bind(&load_smi_rax);
2043 __ SmiToInteger32(kScratchRegister, rax);
2044 __ cvtlsi2sd(xmm1, kScratchRegister);
2045 __ bind(&done);
2046}
2047
2048
2049void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
2050 Label slow, done;
2051
2052 if (op_ == Token::SUB) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002053 if (include_smi_code_) {
2054 // Check whether the value is a smi.
2055 Label try_float;
2056 __ JumpIfNotSmi(rax, &try_float);
2057 if (negative_zero_ == kIgnoreNegativeZero) {
2058 __ SmiCompare(rax, Smi::FromInt(0));
2059 __ j(equal, &done);
2060 }
2061 __ SmiNeg(rax, rax, &done);
Ben Murdochf87a2032010-10-22 12:50:53 +01002062 __ jmp(&slow); // zero, if not handled above, and Smi::kMinValue.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002063
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002064 // Try floating point case.
2065 __ bind(&try_float);
2066 } else if (FLAG_debug_code) {
2067 __ AbortIfSmi(rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002068 }
2069
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002070 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
2071 Heap::kHeapNumberMapRootIndex);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002072 __ j(not_equal, &slow);
2073 // Operand is a float, negate its value by flipping sign bit.
2074 __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset));
2075 __ movq(kScratchRegister, Immediate(0x01));
2076 __ shl(kScratchRegister, Immediate(63));
2077 __ xor_(rdx, kScratchRegister); // Flip sign.
2078 // rdx is value to store.
2079 if (overwrite_ == UNARY_OVERWRITE) {
2080 __ movq(FieldOperand(rax, HeapNumber::kValueOffset), rdx);
2081 } else {
2082 __ AllocateHeapNumber(rcx, rbx, &slow);
2083 // rcx: allocated 'empty' number
2084 __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx);
2085 __ movq(rax, rcx);
2086 }
2087 } else if (op_ == Token::BIT_NOT) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002088 if (include_smi_code_) {
2089 Label try_float;
2090 __ JumpIfNotSmi(rax, &try_float);
2091 __ SmiNot(rax, rax);
2092 __ jmp(&done);
2093 // Try floating point case.
2094 __ bind(&try_float);
2095 } else if (FLAG_debug_code) {
2096 __ AbortIfSmi(rax);
2097 }
2098
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002099 // Check if the operand is a heap number.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002100 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
2101 Heap::kHeapNumberMapRootIndex);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002102 __ j(not_equal, &slow);
2103
2104 // Convert the heap number in rax to an untagged integer in rcx.
2105 IntegerConvert(masm, rax, rax);
2106
2107 // Do the bitwise operation and smi tag the result.
2108 __ notl(rax);
2109 __ Integer32ToSmi(rax, rax);
2110 }
2111
2112 // Return from the stub.
2113 __ bind(&done);
2114 __ StubReturn(1);
2115
2116 // Handle the slow case by jumping to the JavaScript builtin.
2117 __ bind(&slow);
2118 __ pop(rcx); // pop return address
2119 __ push(rax);
2120 __ push(rcx); // push return address
2121 switch (op_) {
2122 case Token::SUB:
2123 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
2124 break;
2125 case Token::BIT_NOT:
2126 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
2127 break;
2128 default:
2129 UNREACHABLE();
2130 }
2131}
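
// A rough standalone equivalent of the negation above (illustrative): the
// stub negates a heap number by flipping only the IEEE-754 sign bit rather
// than doing floating-point arithmetic.
#include <stdint.h>
#include <string.h>
static double NegateDoubleSketch(double value) {
  uint64_t bits;
  memcpy(&bits, &value, sizeof(bits));
  bits ^= static_cast<uint64_t>(1) << 63;  // the 0x01 << 63 mask from above
  memcpy(&value, &bits, sizeof(value));
  return value;
}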
2132
2133
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002134void MathPowStub::Generate(MacroAssembler* masm) {
2135 // Registers are used as follows:
2136 // rdx = base
2137 // rax = exponent
2138 // rcx = temporary, result
2139
2140 Label allocate_return, call_runtime;
2141
2142 // Load input parameters.
2143 __ movq(rdx, Operand(rsp, 2 * kPointerSize));
2144 __ movq(rax, Operand(rsp, 1 * kPointerSize));
2145
2146 // Save 1 in xmm3 - we need this several times later on.
2147 __ movl(rcx, Immediate(1));
2148 __ cvtlsi2sd(xmm3, rcx);
2149
2150 Label exponent_nonsmi;
2151 Label base_nonsmi;
2152 // If the exponent is a heap number go to that specific case.
2153 __ JumpIfNotSmi(rax, &exponent_nonsmi);
2154 __ JumpIfNotSmi(rdx, &base_nonsmi);
2155
2156 // Optimized version when both exponent and base are smis.
2157 Label powi;
2158 __ SmiToInteger32(rdx, rdx);
2159 __ cvtlsi2sd(xmm0, rdx);
2160 __ jmp(&powi);
2161 // Exponent is a smi and base is a heapnumber.
2162 __ bind(&base_nonsmi);
2163 __ CompareRoot(FieldOperand(rdx, HeapObject::kMapOffset),
2164 Heap::kHeapNumberMapRootIndex);
2165 __ j(not_equal, &call_runtime);
2166
2167 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
2168
2169 // Optimized version of pow if exponent is a smi.
2170 // xmm0 contains the base.
2171 __ bind(&powi);
2172 __ SmiToInteger32(rax, rax);
2173
2174 // Save exponent in base as we need to check if exponent is negative later.
2175 // We know that base and exponent are in different registers.
2176 __ movq(rdx, rax);
2177
2178 // Get absolute value of exponent.
2179 NearLabel no_neg;
2180 __ cmpl(rax, Immediate(0));
2181 __ j(greater_equal, &no_neg);
2182 __ negl(rax);
2183 __ bind(&no_neg);
2184
2185 // Load xmm1 with 1.
2186 __ movsd(xmm1, xmm3);
2187 NearLabel while_true;
2188 NearLabel no_multiply;
2189
2190 __ bind(&while_true);
2191 __ shrl(rax, Immediate(1));
2192 __ j(not_carry, &no_multiply);
2193 __ mulsd(xmm1, xmm0);
2194 __ bind(&no_multiply);
2195 __ mulsd(xmm0, xmm0);
2196 __ j(not_zero, &while_true);
2197
2198 // Base has the original value of the exponent - if the exponent is
2199 // negative, return 1/result.
2200 __ testl(rdx, rdx);
2201 __ j(positive, &allocate_return);
2202 // Special case if xmm1 has reached infinity.
2203 __ divsd(xmm3, xmm1);
2204 __ movsd(xmm1, xmm3);
2205 __ xorpd(xmm0, xmm0);
2206 __ ucomisd(xmm0, xmm1);
2207 __ j(equal, &call_runtime);
2208
2209 __ jmp(&allocate_return);
2210
2211 // Exponent (or both) is a heapnumber - either way, we now work
2212 // on doubles.
2213 __ bind(&exponent_nonsmi);
2214 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
2215 Heap::kHeapNumberMapRootIndex);
2216 __ j(not_equal, &call_runtime);
2217 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
2218 // Test if exponent is nan.
2219 __ ucomisd(xmm1, xmm1);
2220 __ j(parity_even, &call_runtime);
2221
2222 NearLabel base_not_smi;
2223 NearLabel handle_special_cases;
2224 __ JumpIfNotSmi(rdx, &base_not_smi);
2225 __ SmiToInteger32(rdx, rdx);
2226 __ cvtlsi2sd(xmm0, rdx);
2227 __ jmp(&handle_special_cases);
2228
2229 __ bind(&base_not_smi);
2230 __ CompareRoot(FieldOperand(rdx, HeapObject::kMapOffset),
2231 Heap::kHeapNumberMapRootIndex);
2232 __ j(not_equal, &call_runtime);
2233 __ movl(rcx, FieldOperand(rdx, HeapNumber::kExponentOffset));
2234 __ andl(rcx, Immediate(HeapNumber::kExponentMask));
2235 __ cmpl(rcx, Immediate(HeapNumber::kExponentMask));
2236 // base is NaN or +/-Infinity
2237 __ j(greater_equal, &call_runtime);
2238 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
2239
2240 // base is in xmm0 and exponent is in xmm1.
2241 __ bind(&handle_special_cases);
2242 NearLabel not_minus_half;
2243 // Test for -0.5.
2244 // Load xmm2 with -0.5.
2245 __ movq(rcx, V8_UINT64_C(0xBFE0000000000000), RelocInfo::NONE);
2246 __ movq(xmm2, rcx);
2247 // xmm2 now has -0.5.
2248 __ ucomisd(xmm2, xmm1);
2249 __ j(not_equal, &not_minus_half);
2250
2251 // Calculates reciprocal of square root.
2252 // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
2253 __ xorpd(xmm1, xmm1);
2254 __ addsd(xmm1, xmm0);
2255 __ sqrtsd(xmm1, xmm1);
2256 __ divsd(xmm3, xmm1);
2257 __ movsd(xmm1, xmm3);
2258 __ jmp(&allocate_return);
2259
2260 // Test for 0.5.
2261 __ bind(&not_minus_half);
2262 // Load xmm2 with 0.5.
2263 // Since xmm3 is 1 and xmm2 is -0.5 this is simply xmm2 + xmm3.
2264 __ addsd(xmm2, xmm3);
2265 // xmm2 now has 0.5.
2266 __ ucomisd(xmm2, xmm1);
2267 __ j(not_equal, &call_runtime);
2268 // Calculates square root.
2269 // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
2270 __ xorpd(xmm1, xmm1);
2271 __ addsd(xmm1, xmm0);
2272 __ sqrtsd(xmm1, xmm1);
2273
2274 __ bind(&allocate_return);
2275 __ AllocateHeapNumber(rcx, rax, &call_runtime);
2276 __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm1);
2277 __ movq(rax, rcx);
2278 __ ret(2 * kPointerSize);
2279
2280 __ bind(&call_runtime);
2281 __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
2282}
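
// A rough standalone sketch of the smi-exponent fast path above
// (square-and-multiply, illustrative only). The stub additionally bails out
// to the runtime when 1/result overflows to infinity for a negative
// exponent; that check is omitted here.
static double PowiSketch(double base, int exponent) {
  // Take |exponent|; the (exponent + 1) dance avoids overflow on INT_MIN.
  unsigned e = exponent < 0 ? static_cast<unsigned>(-(exponent + 1)) + 1
                            : static_cast<unsigned>(exponent);
  double result = 1.0;
  while (e != 0) {
    if (e & 1) result *= base;  // this bit is set: multiply its power in
    base *= base;               // square the base for the next bit
    e >>= 1;
  }
  return exponent < 0 ? 1.0 / result : result;
}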
2283
2284
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002285void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
2286 // The key is in rdx and the parameter count is in rax.
2287
2288 // The displacement is used for skipping the frame pointer on the
2289 // stack. It is the offset of the last parameter (if any) relative
2290 // to the frame pointer.
2291 static const int kDisplacement = 1 * kPointerSize;
2292
2293 // Check that the key is a smi.
2294 Label slow;
2295 __ JumpIfNotSmi(rdx, &slow);
2296
Steve Block44f0eee2011-05-26 01:26:41 +01002297 // Check if the calling frame is an arguments adaptor frame. We look at the
2298 // context offset, and if the frame is not a regular one, then we find a
2299 // Smi instead of the context. We can't use SmiCompare here, because that
2300 // only works for comparing two smis.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002301 Label adaptor;
2302 __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
Steve Block44f0eee2011-05-26 01:26:41 +01002303 __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
2304 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002305 __ j(equal, &adaptor);
2306
2307 // Check index against formal parameters count limit passed in
2308 // through register rax. Use unsigned comparison to get negative
2309 // check for free.
2310 __ cmpq(rdx, rax);
2311 __ j(above_equal, &slow);
2312
2313 // Read the argument from the stack and return it.
2314 SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
2315 __ lea(rbx, Operand(rbp, index.reg, index.scale, 0));
2316 index = masm->SmiToNegativeIndex(rdx, rdx, kPointerSizeLog2);
2317 __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement));
2318 __ Ret();
2319
2320 // Arguments adaptor case: Check index against actual arguments
2321 // limit found in the arguments adaptor frame. Use unsigned
2322 // comparison to get negative check for free.
2323 __ bind(&adaptor);
2324 __ movq(rcx, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2325 __ cmpq(rdx, rcx);
2326 __ j(above_equal, &slow);
2327
2328 // Read the argument from the stack and return it.
2329 index = masm->SmiToIndex(rax, rcx, kPointerSizeLog2);
2330 __ lea(rbx, Operand(rbx, index.reg, index.scale, 0));
2331 index = masm->SmiToNegativeIndex(rdx, rdx, kPointerSizeLog2);
2332 __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement));
2333 __ Ret();
2334
2335 // Slow-case: Handle non-smi or out-of-bounds access to arguments
2336 // by calling the runtime system.
2337 __ bind(&slow);
2338 __ pop(rbx); // Return address.
2339 __ push(rdx);
2340 __ push(rbx);
2341 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
2342}
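
// The "unsigned comparison to get negative check for free" trick used twice
// above, as a standalone sketch: a negative key reinterpreted as unsigned is
// huge, so one compare rejects both negative and out-of-range indices.
#include <stdint.h>
static bool ArgumentIndexInBounds(int32_t key, int32_t parameter_count) {
  return static_cast<uint32_t>(key) < static_cast<uint32_t>(parameter_count);
}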
2343
2344
2345void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
2346 // rsp[0] : return address
2347 // rsp[8] : number of parameters
2348 // rsp[16] : receiver displacement
2349 // rsp[24] : function
2350
2351 // The displacement is used for skipping the return address and the
2352 // frame pointer on the stack. It is the offset of the last
2353 // parameter (if any) relative to the frame pointer.
2354 static const int kDisplacement = 2 * kPointerSize;
2355
2356 // Check if the calling frame is an arguments adaptor frame.
2357 Label adaptor_frame, try_allocate, runtime;
2358 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
Steve Block44f0eee2011-05-26 01:26:41 +01002359 __ Cmp(Operand(rdx, StandardFrameConstants::kContextOffset),
2360 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002361 __ j(equal, &adaptor_frame);
2362
2363 // Get the length from the frame.
2364 __ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize));
2365 __ jmp(&try_allocate);
2366
2367 // Patch the arguments.length and the parameters pointer.
2368 __ bind(&adaptor_frame);
2369 __ SmiToInteger32(rcx,
2370 Operand(rdx,
2371 ArgumentsAdaptorFrameConstants::kLengthOffset));
2372 // Space on stack must already hold a smi.
2373 __ Integer32ToSmiField(Operand(rsp, 1 * kPointerSize), rcx);
2374 // Do not clobber the length index for the indexing operation since
2375 // it is used to compute the size for allocation later.
2376 __ lea(rdx, Operand(rdx, rcx, times_pointer_size, kDisplacement));
2377 __ movq(Operand(rsp, 2 * kPointerSize), rdx);
2378
2379 // Try the new space allocation. Start out with computing the size of
2380 // the arguments object and the elements array.
2381 Label add_arguments_object;
2382 __ bind(&try_allocate);
2383 __ testl(rcx, rcx);
2384 __ j(zero, &add_arguments_object);
2385 __ leal(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize));
2386 __ bind(&add_arguments_object);
Steve Block44f0eee2011-05-26 01:26:41 +01002387 __ addl(rcx, Immediate(GetArgumentsObjectSize()));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002388
2389 // Do the allocation of both objects in one go.
2390 __ AllocateInNewSpace(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT);
2391
2392 // Get the arguments boilerplate from the current (global) context.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002393 __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
2394 __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset));
Steve Block44f0eee2011-05-26 01:26:41 +01002395 __ movq(rdi, Operand(rdi,
2396 Context::SlotOffset(GetArgumentsBoilerplateIndex())));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002397
2398 // Copy the JS object part.
2399 STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
2400 __ movq(kScratchRegister, FieldOperand(rdi, 0 * kPointerSize));
2401 __ movq(rdx, FieldOperand(rdi, 1 * kPointerSize));
2402 __ movq(rbx, FieldOperand(rdi, 2 * kPointerSize));
2403 __ movq(FieldOperand(rax, 0 * kPointerSize), kScratchRegister);
2404 __ movq(FieldOperand(rax, 1 * kPointerSize), rdx);
2405 __ movq(FieldOperand(rax, 2 * kPointerSize), rbx);
2406
Steve Block44f0eee2011-05-26 01:26:41 +01002407 if (type_ == NEW_NON_STRICT) {
2408 // Setup the callee in-object property.
2409 ASSERT(Heap::kArgumentsCalleeIndex == 1);
2410 __ movq(kScratchRegister, Operand(rsp, 3 * kPointerSize));
2411 __ movq(FieldOperand(rax, JSObject::kHeaderSize +
2412 Heap::kArgumentsCalleeIndex * kPointerSize),
2413 kScratchRegister);
2414 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002415
2416 // Get the length (smi tagged) and set that as an in-object property too.
Steve Block44f0eee2011-05-26 01:26:41 +01002417 ASSERT(Heap::kArgumentsLengthIndex == 0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002418 __ movq(rcx, Operand(rsp, 1 * kPointerSize));
Steve Block44f0eee2011-05-26 01:26:41 +01002419 __ movq(FieldOperand(rax, JSObject::kHeaderSize +
2420 Heap::kArgumentsLengthIndex * kPointerSize),
2421 rcx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002422
2423 // If there are no actual arguments, we're done.
2424 Label done;
2425 __ SmiTest(rcx);
2426 __ j(zero, &done);
2427
2428 // Get the parameters pointer from the stack and untag the length.
2429 __ movq(rdx, Operand(rsp, 2 * kPointerSize));
2430
2431 // Setup the elements pointer in the allocated arguments object and
2432 // initialize the header in the elements fixed array.
Steve Block44f0eee2011-05-26 01:26:41 +01002433 __ lea(rdi, Operand(rax, GetArgumentsObjectSize()));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002434 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi);
2435 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
2436 __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
2437 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
2438 __ SmiToInteger32(rcx, rcx); // Untag length for the loop below.
2439
2440 // Copy the fixed array slots.
2441 Label loop;
2442 __ bind(&loop);
2443 __ movq(kScratchRegister, Operand(rdx, -1 * kPointerSize)); // Skip receiver.
2444 __ movq(FieldOperand(rdi, FixedArray::kHeaderSize), kScratchRegister);
2445 __ addq(rdi, Immediate(kPointerSize));
2446 __ subq(rdx, Immediate(kPointerSize));
2447 __ decl(rcx);
2448 __ j(not_zero, &loop);
2449
2450 // Return and remove the on-stack parameters.
2451 __ bind(&done);
2452 __ ret(3 * kPointerSize);
2453
2454 // Do the runtime call to allocate the arguments object.
2455 __ bind(&runtime);
2456 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
2457}
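
// A rough sketch of the allocation size computed in the try_allocate path
// above (illustrative; arguments_object_size stands in for the stub's
// GetArgumentsObjectSize(), i.e. the JSObject header plus the in-object
// length and, in non-strict mode, callee properties).
static int ArgumentsAllocationSizeSketch(int argument_count,
                                         int arguments_object_size) {
  // No elements array is needed for zero arguments.
  int elements_size = argument_count == 0
      ? 0
      : FixedArray::kHeaderSize + argument_count * kPointerSize;
  return elements_size + arguments_object_size;
}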
2458
2459
2460void RegExpExecStub::Generate(MacroAssembler* masm) {
2461 // Just jump directly to runtime if native RegExp is not selected at compile
2462 // time, or if the regexp entry in generated code has been turned off by a
2463 // runtime flag.
2464#ifdef V8_INTERPRETED_REGEXP
2465 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
2466#else // V8_INTERPRETED_REGEXP
2467 if (!FLAG_regexp_entry_native) {
2468 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
2469 return;
2470 }
2471
2472 // Stack frame on entry.
Steve Block1e0659c2011-05-24 12:43:12 +01002473 // rsp[0]: return address
2474 // rsp[8]: last_match_info (expected JSArray)
2475 // rsp[16]: previous index
2476 // rsp[24]: subject string
2477 // rsp[32]: JSRegExp object
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002478
2479 static const int kLastMatchInfoOffset = 1 * kPointerSize;
2480 static const int kPreviousIndexOffset = 2 * kPointerSize;
2481 static const int kSubjectOffset = 3 * kPointerSize;
2482 static const int kJSRegExpOffset = 4 * kPointerSize;
2483
2484 Label runtime;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002485 // Ensure that a RegExp stack is allocated.
Steve Block44f0eee2011-05-26 01:26:41 +01002486 Isolate* isolate = masm->isolate();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002487 ExternalReference address_of_regexp_stack_memory_address =
Steve Block44f0eee2011-05-26 01:26:41 +01002488 ExternalReference::address_of_regexp_stack_memory_address(isolate);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002489 ExternalReference address_of_regexp_stack_memory_size =
Steve Block44f0eee2011-05-26 01:26:41 +01002490 ExternalReference::address_of_regexp_stack_memory_size(isolate);
2491 __ Load(kScratchRegister, address_of_regexp_stack_memory_size);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002492 __ testq(kScratchRegister, kScratchRegister);
2493 __ j(zero, &runtime);
2494
2495
2496 // Check that the first argument is a JSRegExp object.
2497 __ movq(rax, Operand(rsp, kJSRegExpOffset));
2498 __ JumpIfSmi(rax, &runtime);
2499 __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister);
2500 __ j(not_equal, &runtime);
2501 // Check that the RegExp has been compiled (data contains a fixed array).
Steve Block44f0eee2011-05-26 01:26:41 +01002502 __ movq(rax, FieldOperand(rax, JSRegExp::kDataOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002503 if (FLAG_debug_code) {
Steve Block44f0eee2011-05-26 01:26:41 +01002504 Condition is_smi = masm->CheckSmi(rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002505 __ Check(NegateCondition(is_smi),
2506 "Unexpected type for RegExp data, FixedArray expected");
Steve Block44f0eee2011-05-26 01:26:41 +01002507 __ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002508 __ Check(equal, "Unexpected type for RegExp data, FixedArray expected");
2509 }
2510
Steve Block44f0eee2011-05-26 01:26:41 +01002511 // rax: RegExp data (FixedArray)
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002512 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
Steve Block44f0eee2011-05-26 01:26:41 +01002513 __ SmiToInteger32(rbx, FieldOperand(rax, JSRegExp::kDataTagOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002514 __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP));
2515 __ j(not_equal, &runtime);
2516
Steve Block44f0eee2011-05-26 01:26:41 +01002517 // rax: RegExp data (FixedArray)
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002518 // Check that the number of captures fit in the static offsets vector buffer.
2519 __ SmiToInteger32(rdx,
Steve Block44f0eee2011-05-26 01:26:41 +01002520 FieldOperand(rax, JSRegExp::kIrregexpCaptureCountOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002521 // Calculate number of capture registers (number_of_captures + 1) * 2.
2522 __ leal(rdx, Operand(rdx, rdx, times_1, 2));
2523 // Check that the static offsets vector buffer is large enough.
2524 __ cmpl(rdx, Immediate(OffsetsVector::kStaticOffsetsVectorSize));
2525 __ j(above, &runtime);
2526
Steve Block44f0eee2011-05-26 01:26:41 +01002527 // rax: RegExp data (FixedArray)
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002528 // rdx: Number of capture registers
2529 // Check that the second argument is a string.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002530 __ movq(rdi, Operand(rsp, kSubjectOffset));
2531 __ JumpIfSmi(rdi, &runtime);
2532 Condition is_string = masm->IsObjectStringType(rdi, rbx, rbx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002533 __ j(NegateCondition(is_string), &runtime);
2534
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002535 // rdi: Subject string.
2536 // rax: RegExp data (FixedArray).
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002537 // rdx: Number of capture registers.
2538 // Check that the third argument is a positive smi less than the string
2539 // length. A negative value will be greater (unsigned comparison).
2540 __ movq(rbx, Operand(rsp, kPreviousIndexOffset));
2541 __ JumpIfNotSmi(rbx, &runtime);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002542 __ SmiCompare(rbx, FieldOperand(rdi, String::kLengthOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002543 __ j(above_equal, &runtime);
2544
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002545 // rax: RegExp data (FixedArray)
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002546 // rdx: Number of capture registers
2547 // Check that the fourth object is a JSArray object.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002548 __ movq(rdi, Operand(rsp, kLastMatchInfoOffset));
2549 __ JumpIfSmi(rdi, &runtime);
2550 __ CmpObjectType(rdi, JS_ARRAY_TYPE, kScratchRegister);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002551 __ j(not_equal, &runtime);
2552 // Check that the JSArray is in fast case.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002553 __ movq(rbx, FieldOperand(rdi, JSArray::kElementsOffset));
2554 __ movq(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
Steve Block44f0eee2011-05-26 01:26:41 +01002555 __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
2556 Heap::kFixedArrayMapRootIndex);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002557 __ j(not_equal, &runtime);
2558 // Check that the last match info has space for the capture registers and the
2559 // additional information. Ensure no overflow in add.
2560 STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002561 __ SmiToInteger32(rdi, FieldOperand(rbx, FixedArray::kLengthOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002562 __ addl(rdx, Immediate(RegExpImpl::kLastMatchOverhead));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002563 __ cmpl(rdx, rdi);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002564 __ j(greater, &runtime);
2565
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002566 // rax: RegExp data (FixedArray)
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002567 // Check the representation and encoding of the subject string.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002568 NearLabel seq_ascii_string, seq_two_byte_string, check_code;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002569 __ movq(rdi, Operand(rsp, kSubjectOffset));
2570 __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002571 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
2572 // First check for flat two byte string.
2573 __ andb(rbx, Immediate(
2574 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask));
2575 STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
2576 __ j(zero, &seq_two_byte_string);
2577 // Any other flat string must be a flat ascii string.
2578 __ testb(rbx, Immediate(kIsNotStringMask | kStringRepresentationMask));
2579 __ j(zero, &seq_ascii_string);
2580
2581 // Check for flat cons string.
2582 // A flat cons string is a cons string where the second part is the empty
2583 // string. In that case the subject string is just the first part of the cons
2584 // string. Also in this case the first part of the cons string is known to be
2585 // a sequential string or an external string.
2586 STATIC_ASSERT(kExternalStringTag != 0);
2587 STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0);
2588 __ testb(rbx, Immediate(kIsNotStringMask | kExternalStringTag));
2589 __ j(not_zero, &runtime);
2590 // String is a cons string.
Steve Block44f0eee2011-05-26 01:26:41 +01002591 __ CompareRoot(FieldOperand(rdi, ConsString::kSecondOffset),
2592 Heap::kEmptyStringRootIndex);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002593 __ j(not_equal, &runtime);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002594 __ movq(rdi, FieldOperand(rdi, ConsString::kFirstOffset));
2595 __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002596 // String is a cons string with empty second part.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002597 // rdi: first part of cons string.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002598 // rbx: map of first part of cons string.
2599 // Is first part a flat two byte string?
2600 __ testb(FieldOperand(rbx, Map::kInstanceTypeOffset),
2601 Immediate(kStringRepresentationMask | kStringEncodingMask));
2602 STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
2603 __ j(zero, &seq_two_byte_string);
2604 // Any other flat string must be ascii.
2605 __ testb(FieldOperand(rbx, Map::kInstanceTypeOffset),
2606 Immediate(kStringRepresentationMask));
2607 __ j(not_zero, &runtime);
2608
2609 __ bind(&seq_ascii_string);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002610 // rdi: subject string (sequential ascii)
2611 // rax: RegExp data (FixedArray)
2612 __ movq(r11, FieldOperand(rax, JSRegExp::kDataAsciiCodeOffset));
2613 __ Set(rcx, 1); // Type is ascii.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002614 __ jmp(&check_code);
2615
2616 __ bind(&seq_two_byte_string);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002617 // rdi: subject string (flat two-byte)
2618 // rax: RegExp data (FixedArray)
2619 __ movq(r11, FieldOperand(rax, JSRegExp::kDataUC16CodeOffset));
2620 __ Set(rcx, 0); // Type is two byte.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002621
2622 __ bind(&check_code);
2623 // Check that the irregexp code has been generated for the actual string
2624 // encoding. If it has, the field contains a code object otherwise it contains
2625 // the hole.
2626 __ CmpObjectType(r11, CODE_TYPE, kScratchRegister);
2627 __ j(not_equal, &runtime);
2628
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002629 // rdi: subject string
2630 // rcx: encoding of subject string (1 if ascii, 0 if two_byte);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002631 // r11: code
2632 // Load used arguments before starting to push arguments for call to native
2633 // RegExp code to avoid handling changing stack height.
2634 __ SmiToInteger64(rbx, Operand(rsp, kPreviousIndexOffset));
2635
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002636 // rdi: subject string
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002637 // rbx: previous index
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002638 // rcx: encoding of subject string (1 if ascii 0 if two_byte);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002639 // r11: code
2640 // All checks done. Now push arguments for native regexp code.
Steve Block44f0eee2011-05-26 01:26:41 +01002641 Counters* counters = masm->isolate()->counters();
2642 __ IncrementCounter(counters->regexp_entry_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002643
Steve Block44f0eee2011-05-26 01:26:41 +01002644 // Isolates: note we add an additional parameter here (isolate pointer).
2645 static const int kRegExpExecuteArguments = 8;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002646 int argument_slots_on_stack =
2647 masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
Steve Block44f0eee2011-05-26 01:26:41 +01002648 __ EnterApiExitFrame(argument_slots_on_stack);
2649
2650 // Argument 8: Pass current isolate address.
2653 __ LoadAddress(kScratchRegister, ExternalReference::isolate_address());
2654 __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize),
2655 kScratchRegister);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002656
2657 // Argument 7: Indicate that this is a direct call from JavaScript.
Steve Block44f0eee2011-05-26 01:26:41 +01002658 __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kPointerSize),
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002659 Immediate(1));
2660
2661 // Argument 6: Start (high end) of backtracking stack memory area.
2662 __ movq(kScratchRegister, address_of_regexp_stack_memory_address);
2663 __ movq(r9, Operand(kScratchRegister, 0));
2664 __ movq(kScratchRegister, address_of_regexp_stack_memory_size);
2665 __ addq(r9, Operand(kScratchRegister, 0));
2666 // Argument 6 passed in r9 on Linux and on the stack on Windows.
2667#ifdef _WIN64
Steve Block44f0eee2011-05-26 01:26:41 +01002668 __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kPointerSize), r9);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002669#endif
2670
2671 // Argument 5: static offsets vector buffer.
Steve Block44f0eee2011-05-26 01:26:41 +01002672 __ LoadAddress(r8,
2673 ExternalReference::address_of_static_offsets_vector(isolate));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002674 // Argument 5 passed in r8 on Linux and on the stack on Windows.
2675#ifdef _WIN64
Steve Block44f0eee2011-05-26 01:26:41 +01002676 __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kPointerSize), r8);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002677#endif
2678
2679 // First four arguments are passed in registers on both Linux and Windows.
2680#ifdef _WIN64
2681 Register arg4 = r9;
2682 Register arg3 = r8;
2683 Register arg2 = rdx;
2684 Register arg1 = rcx;
2685#else
2686 Register arg4 = rcx;
2687 Register arg3 = rdx;
2688 Register arg2 = rsi;
2689 Register arg1 = rdi;
2690#endif
2691
2692 // Keep track of aliasing between argX defined above and the registers used.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002693 // rdi: subject string
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002694 // rbx: previous index
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002695 // rcx: encoding of subject string (1 if ascii 0 if two_byte);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002696 // r11: code
2697
2698 // Argument 4: End of string data
2699 // Argument 3: Start of string data
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002700 NearLabel setup_two_byte, setup_rest;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002701 __ testb(rcx, rcx); // Last use of rcx as encoding of subject string.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002702 __ j(zero, &setup_two_byte);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002703 __ SmiToInteger32(rcx, FieldOperand(rdi, String::kLengthOffset));
2704 __ lea(arg4, FieldOperand(rdi, rcx, times_1, SeqAsciiString::kHeaderSize));
2705 __ lea(arg3, FieldOperand(rdi, rbx, times_1, SeqAsciiString::kHeaderSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002706 __ jmp(&setup_rest);
2707 __ bind(&setup_two_byte);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002708 __ SmiToInteger32(rcx, FieldOperand(rdi, String::kLengthOffset));
2709 __ lea(arg4, FieldOperand(rdi, rcx, times_2, SeqTwoByteString::kHeaderSize));
2710 __ lea(arg3, FieldOperand(rdi, rbx, times_2, SeqTwoByteString::kHeaderSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002711
2712 __ bind(&setup_rest);
2713 // Argument 2: Previous index.
2714 __ movq(arg2, rbx);
2715
2716 // Argument 1: Subject string.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002717#ifdef _WIN64
2718 __ movq(arg1, rdi);
2719#else
2720 // Already there in AMD64 calling convention.
2721 ASSERT(arg1.is(rdi));
2722#endif
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002723
2724 // Locate the code entry and call it.
2725 __ addq(r11, Immediate(Code::kHeaderSize - kHeapObjectTag));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002726 __ call(r11);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002727
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002728 __ LeaveApiExitFrame();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002729
2730 // Check the result.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002731 NearLabel success;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002732 Label exception;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002733 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::SUCCESS));
2734 __ j(equal, &success);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002735 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002736 __ j(equal, &exception);
2737 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE));
2738 // If none of the above, it can only be retry.
2739 // Handle that in the runtime system.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002740 __ j(not_equal, &runtime);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002741
2742 // For failure return null.
2743 __ LoadRoot(rax, Heap::kNullValueRootIndex);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002744 __ ret(4 * kPointerSize);
2745
2746 // Load RegExp data.
2747 __ bind(&success);
2748 __ movq(rax, Operand(rsp, kJSRegExpOffset));
2749 __ movq(rcx, FieldOperand(rax, JSRegExp::kDataOffset));
2750 __ SmiToInteger32(rax,
2751 FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
2752 // Calculate number of capture registers (number_of_captures + 1) * 2.
2753 __ leal(rdx, Operand(rax, rax, times_1, 2));
2754
2755 // rdx: Number of capture registers
2756 // Load last_match_info which is still known to be a fast case JSArray.
2757 __ movq(rax, Operand(rsp, kLastMatchInfoOffset));
2758 __ movq(rbx, FieldOperand(rax, JSArray::kElementsOffset));
2759
2760 // rbx: last_match_info backing store (FixedArray)
2761 // rdx: number of capture registers
2762 // Store the capture count.
2763 __ Integer32ToSmi(kScratchRegister, rdx);
2764 __ movq(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset),
2765 kScratchRegister);
2766 // Store last subject and last input.
2767 __ movq(rax, Operand(rsp, kSubjectOffset));
2768 __ movq(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax);
2769 __ movq(rcx, rbx);
2770 __ RecordWrite(rcx, RegExpImpl::kLastSubjectOffset, rax, rdi);
2771 __ movq(rax, Operand(rsp, kSubjectOffset));
2772 __ movq(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax);
2773 __ movq(rcx, rbx);
2774 __ RecordWrite(rcx, RegExpImpl::kLastInputOffset, rax, rdi);
2775
2776 // Get the static offsets vector filled by the native regexp code.
Steve Block44f0eee2011-05-26 01:26:41 +01002777 __ LoadAddress(rcx,
2778 ExternalReference::address_of_static_offsets_vector(isolate));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002779
2780 // rbx: last_match_info backing store (FixedArray)
2781 // rcx: offsets vector
2782 // rdx: number of capture registers
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002783 NearLabel next_capture, done;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002784 // Capture register counter starts from number of capture registers and
2785 // counts down until wrapping after zero.
2786 __ bind(&next_capture);
2787 __ subq(rdx, Immediate(1));
2788 __ j(negative, &done);
2789 // Read the value from the static offsets vector buffer and make it a smi.
2790 __ movl(rdi, Operand(rcx, rdx, times_int_size, 0));
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002791 __ Integer32ToSmi(rdi, rdi);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002792 // Store the smi value in the last match info.
2793 __ movq(FieldOperand(rbx,
2794 rdx,
2795 times_pointer_size,
2796 RegExpImpl::kFirstCaptureOffset),
2797 rdi);
2798 __ jmp(&next_capture);
2799 __ bind(&done);
2800
2801 // Return last match info.
2802 __ movq(rax, Operand(rsp, kLastMatchInfoOffset));
2803 __ ret(4 * kPointerSize);
2804
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002805 __ bind(&exception);
2806 // The result must now be an exception. If there is no pending exception, a
2807 // stack overflow (on the backtrack stack) was detected in the RegExp code but
2808 // the exception has not been created yet. Handle that in the runtime system.
2809 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
Steve Block44f0eee2011-05-26 01:26:41 +01002810 ExternalReference pending_exception_address(
2811 Isolate::k_pending_exception_address, isolate);
2812 Operand pending_exception_operand =
2813 masm->ExternalOperand(pending_exception_address, rbx);
2814 __ movq(rax, pending_exception_operand);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002815 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
2816 __ cmpq(rax, rdx);
2817 __ j(equal, &runtime);
Steve Block44f0eee2011-05-26 01:26:41 +01002818 __ movq(pending_exception_operand, rdx);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002819
2820 __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
2821 NearLabel termination_exception;
2822 __ j(equal, &termination_exception);
2823 __ Throw(rax);
2824
2825 __ bind(&termination_exception);
2826 __ ThrowUncatchable(TERMINATION, rax);
2827
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002828 // Do the runtime call to execute the regexp.
2829 __ bind(&runtime);
2830 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
2831#endif // V8_INTERPRETED_REGEXP
2832}
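
// The capture-register count computed above with a single lea
// (rdx = rdx * 2 + 2), written out as a sketch: one (start, end) pair per
// capture group plus one pair for the whole match.
static int CaptureRegisterCountSketch(int number_of_captures) {
  return (number_of_captures + 1) * 2;
}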
2833
2834
Ben Murdochb0fe1622011-05-05 13:52:32 +01002835void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
2836 const int kMaxInlineLength = 100;
2837 Label slowcase;
2838 Label done;
2839 __ movq(r8, Operand(rsp, kPointerSize * 3));
2840 __ JumpIfNotSmi(r8, &slowcase);
2841 __ SmiToInteger32(rbx, r8);
2842 __ cmpl(rbx, Immediate(kMaxInlineLength));
2843 __ j(above, &slowcase);
2844 // Smi-tagging is equivalent to multiplying by 2.
2845 STATIC_ASSERT(kSmiTag == 0);
2846 STATIC_ASSERT(kSmiTagSize == 1);
Steve Block1e0659c2011-05-24 12:43:12 +01002847 // Allocate RegExpResult followed by FixedArray with size in rbx.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002848 // JSArray: [Map][empty properties][Elements][Length-smi][index][input]
2849 // Elements: [Map][Length][..elements..]
2850 __ AllocateInNewSpace(JSRegExpResult::kSize + FixedArray::kHeaderSize,
2851 times_pointer_size,
2852 rbx, // In: Number of elements.
2853 rax, // Out: Start of allocation (tagged).
2854 rcx, // Out: End of allocation.
2855 rdx, // Scratch register
2856 &slowcase,
2857 TAG_OBJECT);
2858 // rax: Start of allocated area, object-tagged.
2859 // rbx: Number of array elements as int32.
2860 // r8: Number of array elements as smi.
2861
2862 // Set JSArray map to global.regexp_result_map().
2863 __ movq(rdx, ContextOperand(rsi, Context::GLOBAL_INDEX));
2864 __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalContextOffset));
2865 __ movq(rdx, ContextOperand(rdx, Context::REGEXP_RESULT_MAP_INDEX));
2866 __ movq(FieldOperand(rax, HeapObject::kMapOffset), rdx);
2867
2868 // Set empty properties FixedArray.
Steve Block44f0eee2011-05-26 01:26:41 +01002869 __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex);
2870 __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002871
2872 // Set elements to point to FixedArray allocated right after the JSArray.
2873 __ lea(rcx, Operand(rax, JSRegExpResult::kSize));
2874 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx);
2875
2876 // Set input, index and length fields from arguments.
2877 __ movq(r8, Operand(rsp, kPointerSize * 1));
2878 __ movq(FieldOperand(rax, JSRegExpResult::kInputOffset), r8);
2879 __ movq(r8, Operand(rsp, kPointerSize * 2));
2880 __ movq(FieldOperand(rax, JSRegExpResult::kIndexOffset), r8);
2881 __ movq(r8, Operand(rsp, kPointerSize * 3));
2882 __ movq(FieldOperand(rax, JSArray::kLengthOffset), r8);
2883
2884 // Fill out the elements FixedArray.
2885 // rax: JSArray.
2886 // rcx: FixedArray.
2887 // rbx: Number of elements in array as int32.
2888
2889 // Set map.
Steve Block44f0eee2011-05-26 01:26:41 +01002890 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
2891 __ movq(FieldOperand(rcx, HeapObject::kMapOffset), kScratchRegister);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002892 // Set length.
2893 __ Integer32ToSmi(rdx, rbx);
2894 __ movq(FieldOperand(rcx, FixedArray::kLengthOffset), rdx);
2895 // Fill contents of fixed-array with the-hole.
Steve Block44f0eee2011-05-26 01:26:41 +01002896 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002897 __ lea(rcx, FieldOperand(rcx, FixedArray::kHeaderSize));
2898 // Fill fixed array elements with hole.
2899 // rax: JSArray.
2900 // rbx: Number of elements in array that remains to be filled, as int32.
2901 // rcx: Start of elements in FixedArray.
2902 // rdx: the hole.
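  // In effect: while (rbx > 0) { rbx--; elements[rbx] = the_hole; }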
2903 Label loop;
2904 __ testl(rbx, rbx);
2905 __ bind(&loop);
Steve Block1e0659c2011-05-24 12:43:12 +01002906 __ j(less_equal, &done); // Jump if rbx is negative or zero.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002907 __ subl(rbx, Immediate(1));
2908 __ movq(Operand(rcx, rbx, times_pointer_size, 0), rdx);
2909 __ jmp(&loop);
2910
2911 __ bind(&done);
2912 __ ret(3 * kPointerSize);
2913
2914 __ bind(&slowcase);
2915 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
2916}
2917
2918
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002919void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
2920 Register object,
2921 Register result,
2922 Register scratch1,
2923 Register scratch2,
2924 bool object_is_smi,
2925 Label* not_found) {
2926 // Use of registers. Register result is used as a temporary.
2927 Register number_string_cache = result;
2928 Register mask = scratch1;
2929 Register scratch = scratch2;
2930
2931 // Load the number string cache.
2932 __ LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);
2933
2934 // Make the hash mask from the length of the number string cache. It
2935 // contains two elements (number and string) for each cache entry.
2936 __ SmiToInteger32(
2937 mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
2938 __ shrl(mask, Immediate(1));
2939 __ subq(mask, Immediate(1)); // Make mask.
2940
2941 // Calculate the entry in the number string cache. The hash value in the
2942 // number string cache for smis is just the smi value, and the hash for
2943 // doubles is the xor of the upper and lower words. See
2944 // Heap::GetNumberStringCache.
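  // Illustrative C for the double case (only the masked low bits matter):
  //   uint64_t bits = bit_cast<uint64_t>(value);
  //   uint32_t hash = static_cast<uint32_t>(bits >> 32) ^
  //                   static_cast<uint32_t>(bits);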
2945 Label is_smi;
2946 Label load_result_from_cache;
2947 if (!object_is_smi) {
2948 __ JumpIfSmi(object, &is_smi);
Steve Block44f0eee2011-05-26 01:26:41 +01002949 __ CheckMap(object, FACTORY->heap_number_map(), not_found, true);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002950
2951 STATIC_ASSERT(8 == kDoubleSize);
2952 __ movl(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
2953 __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset));
2954 GenerateConvertHashCodeToIndex(masm, scratch, mask);
2955
2956 Register index = scratch;
2957 Register probe = mask;
2958 __ movq(probe,
2959 FieldOperand(number_string_cache,
2960 index,
2961 times_1,
2962 FixedArray::kHeaderSize));
2963 __ JumpIfSmi(probe, not_found);
Steve Block44f0eee2011-05-26 01:26:41 +01002964 ASSERT(Isolate::Current()->cpu_features()->IsSupported(SSE2));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002965 CpuFeatures::Scope fscope(SSE2);
2966 __ movsd(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
2967 __ movsd(xmm1, FieldOperand(probe, HeapNumber::kValueOffset));
2968 __ ucomisd(xmm0, xmm1);
2969 __ j(parity_even, not_found); // Bail out if NaN is involved.
2970 __ j(not_equal, not_found); // The cache did not contain this value.
2971 __ jmp(&load_result_from_cache);
2972 }
2973
2974 __ bind(&is_smi);
2975 __ SmiToInteger32(scratch, object);
2976 GenerateConvertHashCodeToIndex(masm, scratch, mask);
2977
2978 Register index = scratch;
2979 // Check if the entry is the smi we are looking for.
2980 __ cmpq(object,
2981 FieldOperand(number_string_cache,
2982 index,
2983 times_1,
2984 FixedArray::kHeaderSize));
2985 __ j(not_equal, not_found);
2986
2987 // Get the result from the cache.
2988 __ bind(&load_result_from_cache);
2989 __ movq(result,
2990 FieldOperand(number_string_cache,
2991 index,
2992 times_1,
2993 FixedArray::kHeaderSize + kPointerSize));
Steve Block44f0eee2011-05-26 01:26:41 +01002994 Counters* counters = masm->isolate()->counters();
2995 __ IncrementCounter(counters->number_to_string_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002996}
2997
2998
2999void NumberToStringStub::GenerateConvertHashCodeToIndex(MacroAssembler* masm,
3000 Register hash,
3001 Register mask) {
3002 __ and_(hash, mask);
3003 // Each entry in string cache consists of two pointer sized fields,
3004 // but times_twice_pointer_size (multiplication by 16) scale factor
3005 // is not supported by addrmode on x64 platform.
3006 // So we have to premultiply entry index before lookup.
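  // In effect: byte_offset = (hash & mask) * 2 * kPointerSize.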
3007 __ shl(hash, Immediate(kPointerSizeLog2 + 1));
3008}
3009
3010
3011void NumberToStringStub::Generate(MacroAssembler* masm) {
3012 Label runtime;
3013
3014 __ movq(rbx, Operand(rsp, kPointerSize));
3015
3016 // Generate code to lookup number in the number string cache.
3017 GenerateLookupNumberStringCache(masm, rbx, rax, r8, r9, false, &runtime);
3018 __ ret(1 * kPointerSize);
3019
3020 __ bind(&runtime);
3021 // Handle number to string in the runtime system if not found in the cache.
3022 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);
3023}
3024
3025
3026static int NegativeComparisonResult(Condition cc) {
3027 ASSERT(cc != equal);
3028 ASSERT((cc == less) || (cc == less_equal)
3029 || (cc == greater) || (cc == greater_equal));
3030 return (cc == greater || cc == greater_equal) ? LESS : GREATER;
3031}
3032
3033
3034void CompareStub::Generate(MacroAssembler* masm) {
3035 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
3036
3037 Label check_unequal_objects, done;
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003038
3039 // Compare two smis if required.
3040 if (include_smi_compare_) {
3041 Label non_smi, smi_done;
3042 __ JumpIfNotBothSmi(rax, rdx, &non_smi);
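  // Both operands are smis: the signed difference rdx - rax is negative,
  // zero, or positive exactly as rdx compares to rax, unless the
  // subtraction overflows; that case is corrected below.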
3043 __ subq(rdx, rax);
3044 __ j(no_overflow, &smi_done);
Ben Murdochf87a2032010-10-22 12:50:53 +01003045 __ not_(rdx); // Correct sign in case of overflow. rdx cannot be 0 here.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003046 __ bind(&smi_done);
3047 __ movq(rax, rdx);
3048 __ ret(0);
3049 __ bind(&non_smi);
3050 } else if (FLAG_debug_code) {
3051 Label ok;
3052 __ JumpIfNotSmi(rdx, &ok);
3053 __ JumpIfNotSmi(rax, &ok);
3054 __ Abort("CompareStub: smi operands");
3055 __ bind(&ok);
3056 }
3057
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003058 // The compare stub returns a positive, negative, or zero 64-bit integer
3059 // value in rax, corresponding to result of comparing the two inputs.
3060 // NOTICE! This code is only reached after a smi-fast-case check, so
3061 // it is certain that at least one operand isn't a smi.
3062
3063 // Two identical objects are equal unless they are both NaN or undefined.
3064 {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003065 NearLabel not_identical;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003066 __ cmpq(rax, rdx);
3067 __ j(not_equal, &not_identical);
3068
3069 if (cc_ != equal) {
3070 // Check for undefined. undefined OP undefined is false even though
3071 // undefined == undefined.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003072 NearLabel check_for_nan;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003073 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
3074 __ j(not_equal, &check_for_nan);
3075 __ Set(rax, NegativeComparisonResult(cc_));
3076 __ ret(0);
3077 __ bind(&check_for_nan);
3078 }
3079
Steve Block44f0eee2011-05-26 01:26:41 +01003080 // Test for NaN. Sadly, we can't just compare to FACTORY->nan_value(),
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003081 // so we do the second best thing - test it ourselves.
3082 // Note: if cc_ != equal, never_nan_nan_ is not used.
3083 // We cannot set rax to EQUAL until just before return because
3084 // rax must be unchanged on jump to not_identical.
3085
3086 if (never_nan_nan_ && (cc_ == equal)) {
3087 __ Set(rax, EQUAL);
3088 __ ret(0);
3089 } else {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003090 NearLabel heap_number;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003091 // If it's not a heap number, then return equal for (in)equality operator.
3092 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
Steve Block44f0eee2011-05-26 01:26:41 +01003093 FACTORY->heap_number_map());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003094 __ j(equal, &heap_number);
3095 if (cc_ != equal) {
3096 // Call runtime on identical JSObjects. Otherwise return equal.
3097 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
3098 __ j(above_equal, &not_identical);
3099 }
3100 __ Set(rax, EQUAL);
3101 __ ret(0);
3102
3103 __ bind(&heap_number);
3104 // It is a heap number, so return equal if it's not NaN.
3105 // For NaN, return 1 for every condition except greater and
3106 // greater-equal. Return -1 for them, so the comparison yields
3107 // false for all conditions except not-equal.
3108 __ Set(rax, EQUAL);
3109 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
3110 __ ucomisd(xmm0, xmm0);
3111 __ setcc(parity_even, rax);
3112 // rax is 0 for equal non-NaN heapnumbers, 1 for NaNs.
3113 if (cc_ == greater_equal || cc_ == greater) {
3114 __ neg(rax);
3115 }
3116 __ ret(0);
3117 }
3118
3119 __ bind(&not_identical);
3120 }
3121
3122 if (cc_ == equal) { // Both strict and non-strict.
3123 Label slow; // Fallthrough label.
3124
3125 // If we're doing a strict equality comparison, we don't have to do
3126 // type conversion, so we generate code to do fast comparison for objects
3127 // and oddballs. Non-smi numbers and strings still go through the usual
3128 // slow-case code.
3129 if (strict_) {
3130 // If either is a Smi (we know that not both are), then they can only
3131 // be equal if the other is a HeapNumber. If so, use the slow case.
3132 {
3133 Label not_smis;
3134 __ SelectNonSmi(rbx, rax, rdx, &not_smis);
3135
3136 // Check if the non-smi operand is a heap number.
3137 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
Steve Block44f0eee2011-05-26 01:26:41 +01003138 FACTORY->heap_number_map());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003139 // If heap number, handle it in the slow case.
3140 __ j(equal, &slow);
3141 // Return non-equal. ebx (the lower half of rbx) is not zero.
3142 __ movq(rax, rbx);
3143 __ ret(0);
3144
3145 __ bind(&not_smis);
3146 }
3147
3148 // If either operand is a JSObject or an oddball value, then they are not
3149 // equal since their pointers are different.
3150 // There is no test for undetectability in strict equality.
3151
3152 // If the first object is a JS object, we have done pointer comparison.
3153 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003154 NearLabel first_non_object;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003155 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
3156 __ j(below, &first_non_object);
3157 // Return non-zero (eax (not rax) is not zero)
3158 Label return_not_equal;
3159 STATIC_ASSERT(kHeapObjectTag != 0);
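  // A non-zero heap object tag guarantees the low bit of the pointer is set,
  // so eax cannot be zero here.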
3160 __ bind(&return_not_equal);
3161 __ ret(0);
3162
3163 __ bind(&first_non_object);
3164 // Check for oddballs: true, false, null, undefined.
3165 __ CmpInstanceType(rcx, ODDBALL_TYPE);
3166 __ j(equal, &return_not_equal);
3167
3168 __ CmpObjectType(rdx, FIRST_JS_OBJECT_TYPE, rcx);
3169 __ j(above_equal, &return_not_equal);
3170
3171 // Check for oddballs: true, false, null, undefined.
3172 __ CmpInstanceType(rcx, ODDBALL_TYPE);
3173 __ j(equal, &return_not_equal);
3174
3175 // Fall through to the general case.
3176 }
3177 __ bind(&slow);
3178 }
3179
3180 // Generate the number comparison code.
3181 if (include_number_compare_) {
3182 Label non_number_comparison;
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003183 NearLabel unordered;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003184 FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison);
3185 __ xorl(rax, rax);
3186 __ xorl(rcx, rcx);
3187 __ ucomisd(xmm0, xmm1);
3188
3189 // Don't base result on EFLAGS when a NaN is involved.
3190 __ j(parity_even, &unordered);
3191 // Return a result of -1, 0, or 1, based on EFLAGS.
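  // rax = (left > right), rcx = (left < right); rax - rcx yields -1, 0 or 1.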
3192 __ setcc(above, rax);
3193 __ setcc(below, rcx);
3194 __ subq(rax, rcx);
3195 __ ret(0);
3196
3197 // If one of the numbers was NaN, then the result is always false.
3198 // The cc is never not-equal.
3199 __ bind(&unordered);
3200 ASSERT(cc_ != not_equal);
3201 if (cc_ == less || cc_ == less_equal) {
3202 __ Set(rax, 1);
3203 } else {
3204 __ Set(rax, -1);
3205 }
3206 __ ret(0);
3207
3208 // The number comparison code did not provide a valid result.
3209 __ bind(&non_number_comparison);
3210 }
3211
3212 // Fast negative check for symbol-to-symbol equality.
3213 Label check_for_strings;
3214 if (cc_ == equal) {
3215 BranchIfNonSymbol(masm, &check_for_strings, rax, kScratchRegister);
3216 BranchIfNonSymbol(masm, &check_for_strings, rdx, kScratchRegister);
3217
3218 // We've already checked for object identity, so if both operands
3219 // are symbols they aren't equal. Register eax (not rax) already holds a
3220 // non-zero value, which indicates not equal, so just return.
3221 __ ret(0);
3222 }
3223
3224 __ bind(&check_for_strings);
3225
3226 __ JumpIfNotBothSequentialAsciiStrings(
3227 rdx, rax, rcx, rbx, &check_unequal_objects);
3228
3229 // Inline comparison of ascii strings.
3230 StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
3231 rdx,
3232 rax,
3233 rcx,
3234 rbx,
3235 rdi,
3236 r8);
3237
3238#ifdef DEBUG
3239 __ Abort("Unexpected fall-through from string comparison");
3240#endif
3241
3242 __ bind(&check_unequal_objects);
3243 if (cc_ == equal && !strict_) {
3244 // Not strict equality. Objects are unequal if
3245 // they are both JSObjects and not undetectable,
3246 // and their pointers are different.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003247 NearLabel not_both_objects, return_unequal;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003248 // At most one is a smi, so we can test for smi by adding the two.
3249 // A smi plus a heap object has the low bit set, a heap object plus
3250 // a heap object has the low bit clear.
3251 STATIC_ASSERT(kSmiTag == 0);
3252 STATIC_ASSERT(kSmiTagMask == 1);
3253 __ lea(rcx, Operand(rax, rdx, times_1, 0));
3254 __ testb(rcx, Immediate(kSmiTagMask));
3255 __ j(not_zero, &not_both_objects);
3256 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rbx);
3257 __ j(below, &not_both_objects);
3258 __ CmpObjectType(rdx, FIRST_JS_OBJECT_TYPE, rcx);
3259 __ j(below, &not_both_objects);
3260 __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
3261 Immediate(1 << Map::kIsUndetectable));
3262 __ j(zero, &return_unequal);
3263 __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
3264 Immediate(1 << Map::kIsUndetectable));
3265 __ j(zero, &return_unequal);
3266 // The objects are both undetectable, so they both compare as the value
3267 // undefined, and are equal.
3268 __ Set(rax, EQUAL);
3269 __ bind(&return_unequal);
Steve Block1e0659c2011-05-24 12:43:12 +01003270 // Return non-equal by returning the non-zero object pointer in rax,
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003271 // or return equal if we fell through to here.
3272 __ ret(0);
3273 __ bind(&not_both_objects);
3274 }
3275
3276 // Push arguments below the return address to prepare jump to builtin.
3277 __ pop(rcx);
3278 __ push(rdx);
3279 __ push(rax);
3280
3281 // Figure out which native to call and set up the arguments.
3282 Builtins::JavaScript builtin;
3283 if (cc_ == equal) {
3284 builtin = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
3285 } else {
3286 builtin = Builtins::COMPARE;
3287 __ Push(Smi::FromInt(NegativeComparisonResult(cc_)));
3288 }
3289
3290 // Restore return address on the stack.
3291 __ push(rcx);
3292
3293 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
3294 // tagged as a small integer.
3295 __ InvokeBuiltin(builtin, JUMP_FUNCTION);
3296}
3297
3298
3299void CompareStub::BranchIfNonSymbol(MacroAssembler* masm,
3300 Label* label,
3301 Register object,
3302 Register scratch) {
3303 __ JumpIfSmi(object, label);
3304 __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset));
3305 __ movzxbq(scratch,
3306 FieldOperand(scratch, Map::kInstanceTypeOffset));
3307 // Ensure that no non-strings have the symbol bit set.
3308 STATIC_ASSERT(LAST_TYPE < kNotStringTag + kIsSymbolMask);
3309 STATIC_ASSERT(kSymbolTag != 0);
3310 __ testb(scratch, Immediate(kIsSymbolMask));
3311 __ j(zero, label);
3312}
3313
3314
3315void StackCheckStub::Generate(MacroAssembler* masm) {
Ben Murdochf87a2032010-10-22 12:50:53 +01003316 __ TailCallRuntime(Runtime::kStackGuard, 0, 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003317}
3318
3319
3320void CallFunctionStub::Generate(MacroAssembler* masm) {
3321 Label slow;
3322
3323 // If the receiver might be a value (string, number or boolean), check for this
3324 // and box it if it is.
3325 if (ReceiverMightBeValue()) {
3326 // Get the receiver from the stack.
3327 // +1 ~ return address
3328 Label receiver_is_value, receiver_is_js_object;
3329 __ movq(rax, Operand(rsp, (argc_ + 1) * kPointerSize));
3330
3331 // Check if receiver is a smi (which is a number value).
3332 __ JumpIfSmi(rax, &receiver_is_value);
3333
3334 // Check if the receiver is a valid JS object.
3335 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rdi);
3336 __ j(above_equal, &receiver_is_js_object);
3337
3338 // Call the runtime to box the value.
3339 __ bind(&receiver_is_value);
3340 __ EnterInternalFrame();
3341 __ push(rax);
3342 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
3343 __ LeaveInternalFrame();
3344 __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rax);
3345
3346 __ bind(&receiver_is_js_object);
3347 }
3348
3349 // Get the function to call from the stack.
3350 // +2 ~ receiver, return address
3351 __ movq(rdi, Operand(rsp, (argc_ + 2) * kPointerSize));
3352
3353 // Check that the function really is a JavaScript function.
3354 __ JumpIfSmi(rdi, &slow);
3355 // Go to the slow case if we do not have a function.
3356 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
3357 __ j(not_equal, &slow);
3358
3359 // Fast-case: Just invoke the function.
3360 ParameterCount actual(argc_);
3361 __ InvokeFunction(rdi, actual, JUMP_FUNCTION);
3362
3363 // Slow-case: Non-function called.
3364 __ bind(&slow);
3365 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
3366 // of the original receiver from the call site).
3367 __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rdi);
3368 __ Set(rax, argc_);
3369 __ Set(rbx, 0);
3370 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
Steve Block44f0eee2011-05-26 01:26:41 +01003371 Handle<Code> adaptor =
3372 Isolate::Current()->builtins()->ArgumentsAdaptorTrampoline();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003373 __ Jump(adaptor, RelocInfo::CODE_TARGET);
3374}
3375
3376
Steve Block44f0eee2011-05-26 01:26:41 +01003377bool CEntryStub::NeedsImmovableCode() {
3378 return false;
3379}
3380
3381
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003382void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003383 // Throw exception in rax.
3384 __ Throw(rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003385}
3386
3387
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003388void CEntryStub::GenerateCore(MacroAssembler* masm,
3389 Label* throw_normal_exception,
3390 Label* throw_termination_exception,
3391 Label* throw_out_of_memory_exception,
3392 bool do_gc,
Steve Block1e0659c2011-05-24 12:43:12 +01003393 bool always_allocate_scope) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003394 // rax: result parameter for PerformGC, if any.
3395 // rbx: pointer to C function (C callee-saved).
3396 // rbp: frame pointer (restored after C call).
3397 // rsp: stack pointer (restored after C call).
3398 // r14: number of arguments including receiver (C callee-saved).
Steve Block44f0eee2011-05-26 01:26:41 +01003399 // r15: pointer to the first argument (C callee-saved).
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003400 // This pointer is reused in LeaveExitFrame(), so it is stored in a
3401 // callee-saved register.
3402
3403 // Simple results returned in rax (both AMD64 and Win64 calling conventions).
3404 // Complex results must be written to address passed as first argument.
3405 // AMD64 calling convention: a struct of two pointers in rax+rdx
3406
3407 // Check stack alignment.
3408 if (FLAG_debug_code) {
3409 __ CheckStackAlignment();
3410 }
3411
3412 if (do_gc) {
3413 // Pass failure code returned from last attempt as first argument to
3414 // PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the
3415 // stack is known to be aligned. This function takes one argument which is
3416 // passed in a register.
3417#ifdef _WIN64
3418 __ movq(rcx, rax);
3419#else // _WIN64
3420 __ movq(rdi, rax);
3421#endif
3422 __ movq(kScratchRegister,
3423 FUNCTION_ADDR(Runtime::PerformGC),
3424 RelocInfo::RUNTIME_ENTRY);
3425 __ call(kScratchRegister);
3426 }
3427
3428 ExternalReference scope_depth =
Steve Block44f0eee2011-05-26 01:26:41 +01003429 ExternalReference::heap_always_allocate_scope_depth(masm->isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003430 if (always_allocate_scope) {
Steve Block44f0eee2011-05-26 01:26:41 +01003431 Operand scope_depth_operand = masm->ExternalOperand(scope_depth);
3432 __ incl(scope_depth_operand);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003433 }
3434
3435 // Call C function.
3436#ifdef _WIN64
3437 // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9
3438 // Store Arguments object on stack, below the 4 WIN64 ABI parameter slots.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003439 __ movq(StackSpaceOperand(0), r14); // argc.
Steve Block44f0eee2011-05-26 01:26:41 +01003440 __ movq(StackSpaceOperand(1), r15); // argv.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003441 if (result_size_ < 2) {
3442 // Pass a pointer to the Arguments object as the first argument.
3443 // Return result in single register (rax).
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003444 __ lea(rcx, StackSpaceOperand(0));
Steve Block44f0eee2011-05-26 01:26:41 +01003445 __ LoadAddress(rdx, ExternalReference::isolate_address());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003446 } else {
3447 ASSERT_EQ(2, result_size_);
3448 // Pass a pointer to the result location as the first argument.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003449 __ lea(rcx, StackSpaceOperand(2));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003450 // Pass a pointer to the Arguments object as the second argument.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003451 __ lea(rdx, StackSpaceOperand(0));
Steve Block44f0eee2011-05-26 01:26:41 +01003452 __ LoadAddress(r8, ExternalReference::isolate_address());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003453 }
3454
3455#else // _WIN64
3456 // GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9.
3457 __ movq(rdi, r14); // argc.
Steve Block44f0eee2011-05-26 01:26:41 +01003458 __ movq(rsi, r15); // argv.
3459 __ movq(rdx, ExternalReference::isolate_address());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003460#endif
3461 __ call(rbx);
3462 // Result is in rax - do not destroy this register!
3463
3464 if (always_allocate_scope) {
Steve Block44f0eee2011-05-26 01:26:41 +01003465 Operand scope_depth_operand = masm->ExternalOperand(scope_depth);
3466 __ decl(scope_depth_operand);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003467 }
3468
3469 // Check for failure result.
3470 Label failure_returned;
3471 STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
3472#ifdef _WIN64
3473 // If return value is on the stack, pop it to registers.
3474 if (result_size_ > 1) {
3475 ASSERT_EQ(2, result_size_);
3476 // Read result values stored on stack. Result is stored
3477 // above the four argument mirror slots and the two
3478 // Arguments object slots.
3479 __ movq(rax, Operand(rsp, 6 * kPointerSize));
3480 __ movq(rdx, Operand(rsp, 7 * kPointerSize));
3481 }
3482#endif
3483 __ lea(rcx, Operand(rax, 1));
3484 // Lower 2 bits of rcx are 0 iff rax has failure tag.
3485 __ testl(rcx, Immediate(kFailureTagMask));
3486 __ j(zero, &failure_returned);
3487
3488 // Exit the JavaScript to C++ exit frame.
Steve Block1e0659c2011-05-24 12:43:12 +01003489 __ LeaveExitFrame(save_doubles_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003490 __ ret(0);
3491
3492 // Handling of failure.
3493 __ bind(&failure_returned);
3494
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003495 NearLabel retry;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003496 // If the returned exception is RETRY_AFTER_GC, continue at the retry label.
3497 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
3498 __ testl(rax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
3499 __ j(zero, &retry);
3500
3501 // Special handling of out of memory exceptions.
3502 __ movq(kScratchRegister, Failure::OutOfMemoryException(), RelocInfo::NONE);
3503 __ cmpq(rax, kScratchRegister);
3504 __ j(equal, throw_out_of_memory_exception);
3505
3506 // Retrieve the pending exception and clear the variable.
Steve Block44f0eee2011-05-26 01:26:41 +01003507 ExternalReference pending_exception_address(
3508 Isolate::k_pending_exception_address, masm->isolate());
3509 Operand pending_exception_operand =
3510 masm->ExternalOperand(pending_exception_address);
3511 __ movq(rax, pending_exception_operand);
3512 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
3513 __ movq(pending_exception_operand, rdx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003514
3515 // Special handling of termination exceptions which are uncatchable
3516 // by javascript code.
3517 __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
3518 __ j(equal, throw_termination_exception);
3519
3520 // Handle normal exception.
3521 __ jmp(throw_normal_exception);
3522
3523 // Retry.
3524 __ bind(&retry);
3525}
3526
3527
3528void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
3529 UncatchableExceptionType type) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003530 __ ThrowUncatchable(type, rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003531}
3532
3533
3534void CEntryStub::Generate(MacroAssembler* masm) {
3535 // rax: number of arguments including receiver
3536 // rbx: pointer to C function (C callee-saved)
3537 // rbp: frame pointer of calling JS frame (restored after C call)
3538 // rsp: stack pointer (restored after C call)
3539 // rsi: current context (restored)
3540
3541 // NOTE: Invocations of builtins may return failure objects
3542 // instead of a proper result. The builtin entry handles
3543 // this by performing a garbage collection and retrying the
3544 // builtin once.
3545
3546 // Enter the exit frame that transitions from JavaScript to C++.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003547#ifdef _WIN64
3548 int arg_stack_space = (result_size_ < 2 ? 2 : 4);
3549#else
3550 int arg_stack_space = 0;
3551#endif
Steve Block1e0659c2011-05-24 12:43:12 +01003552 __ EnterExitFrame(arg_stack_space, save_doubles_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003553
3554 // rax: Holds the context at this point, but should not be used.
3555 // On entry to code generated by GenerateCore, it must hold
3556 // a failure result if the collect_garbage argument to GenerateCore
3557 // is true. This failure result can be the result of code
3558 // generated by a previous call to GenerateCore. The value
3559 // of rax is then passed to Runtime::PerformGC.
3560 // rbx: pointer to builtin function (C callee-saved).
3561 // rbp: frame pointer of exit frame (restored after C call).
3562 // rsp: stack pointer (restored after C call).
3563 // r14: number of arguments including receiver (C callee-saved).
Steve Block44f0eee2011-05-26 01:26:41 +01003564 // r15: argv pointer (C callee-saved).
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003565
3566 Label throw_normal_exception;
3567 Label throw_termination_exception;
3568 Label throw_out_of_memory_exception;
3569
3570 // Call into the runtime system.
3571 GenerateCore(masm,
3572 &throw_normal_exception,
3573 &throw_termination_exception,
3574 &throw_out_of_memory_exception,
3575 false,
3576 false);
3577
3578 // Do space-specific GC and retry runtime call.
3579 GenerateCore(masm,
3580 &throw_normal_exception,
3581 &throw_termination_exception,
3582 &throw_out_of_memory_exception,
3583 true,
3584 false);
3585
3586 // Do full GC and retry runtime call one final time.
3587 Failure* failure = Failure::InternalError();
3588 __ movq(rax, failure, RelocInfo::NONE);
3589 GenerateCore(masm,
3590 &throw_normal_exception,
3591 &throw_termination_exception,
3592 &throw_out_of_memory_exception,
3593 true,
3594 true);
3595
3596 __ bind(&throw_out_of_memory_exception);
3597 GenerateThrowUncatchable(masm, OUT_OF_MEMORY);
3598
3599 __ bind(&throw_termination_exception);
3600 GenerateThrowUncatchable(masm, TERMINATION);
3601
3602 __ bind(&throw_normal_exception);
3603 GenerateThrowTOS(masm);
3604}
3605
3606
3607void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
3608 Label invoke, exit;
3609#ifdef ENABLE_LOGGING_AND_PROFILING
3610 Label not_outermost_js, not_outermost_js_2;
3611#endif
Steve Block44f0eee2011-05-26 01:26:41 +01003612 { // NOLINT. Scope block confuses linter.
3613 MacroAssembler::NoRootArrayScope uninitialized_root_register(masm);
3614 // Set up the frame.
3615 __ push(rbp);
3616 __ movq(rbp, rsp);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003617
Steve Block44f0eee2011-05-26 01:26:41 +01003618 // Push the stack frame type marker twice.
3619 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
3620 // Scratch register is neither callee-save, nor an argument register on any
3621 // platform. It's free to use at this point.
3622 // Cannot use smi-register for loading yet.
3623 __ movq(kScratchRegister,
3624 reinterpret_cast<uint64_t>(Smi::FromInt(marker)),
3625 RelocInfo::NONE);
3626 __ push(kScratchRegister); // context slot
3627 __ push(kScratchRegister); // function slot
3628 // Save callee-saved registers (X64/Win64 calling conventions).
3629 __ push(r12);
3630 __ push(r13);
3631 __ push(r14);
3632 __ push(r15);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003633#ifdef _WIN64
Steve Block44f0eee2011-05-26 01:26:41 +01003634 __ push(rdi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
3635 __ push(rsi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003636#endif
Steve Block44f0eee2011-05-26 01:26:41 +01003637 __ push(rbx);
3638 // TODO(X64): On Win64, if we ever use XMM6-XMM15, the low 64 bits are
3639 // callee save as well.
3640
3641 // Set up the roots and smi constant registers.
3642 // Needs to be done before any further smi loads.
3643 __ InitializeSmiConstantRegister();
3644 __ InitializeRootRegister();
3645 }
3646
3647 Isolate* isolate = masm->isolate();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003648
3649 // Save copies of the top frame descriptor on the stack.
Steve Block44f0eee2011-05-26 01:26:41 +01003650 ExternalReference c_entry_fp(Isolate::k_c_entry_fp_address, isolate);
3651 {
3652 Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
3653 __ push(c_entry_fp_operand);
3654 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003655
3656#ifdef ENABLE_LOGGING_AND_PROFILING
3657 // If this is the outermost JS call, set js_entry_sp value.
Steve Block44f0eee2011-05-26 01:26:41 +01003658 ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address, isolate);
3659 __ Load(rax, js_entry_sp);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003660 __ testq(rax, rax);
3661 __ j(not_zero, &not_outermost_js);
3662 __ movq(rax, rbp);
Steve Block44f0eee2011-05-26 01:26:41 +01003663 __ Store(js_entry_sp, rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003664 __ bind(&not_outermost_js);
3665#endif
3666
3667 // Call a faked try-block that does the invoke.
3668 __ call(&invoke);
3669
3670 // Caught exception: Store result (exception) in the pending
3671 // exception field in the JSEnv and return a failure sentinel.
Steve Block44f0eee2011-05-26 01:26:41 +01003672 ExternalReference pending_exception(Isolate::k_pending_exception_address,
3673 isolate);
3674 __ Store(pending_exception, rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003675 __ movq(rax, Failure::Exception(), RelocInfo::NONE);
3676 __ jmp(&exit);
3677
3678 // Invoke: Link this frame into the handler chain.
3679 __ bind(&invoke);
3680 __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
3681
3682 // Clear any pending exceptions.
Steve Block44f0eee2011-05-26 01:26:41 +01003683 __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
3684 __ Store(pending_exception, rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003685
3686 // Fake a receiver (NULL).
3687 __ push(Immediate(0)); // receiver
3688
3689 // Invoke the function by calling through JS entry trampoline
3690 // builtin and pop the faked function when we return. We load the address
3691 // from an external reference instead of inlining the call target address
3692 // directly in the code, because the builtin stubs may not have been
3693 // generated yet at the time this code is generated.
3694 if (is_construct) {
Steve Block44f0eee2011-05-26 01:26:41 +01003695 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
3696 isolate);
3697 __ Load(rax, construct_entry);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003698 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01003699 ExternalReference entry(Builtins::kJSEntryTrampoline, isolate);
3700 __ Load(rax, entry);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003701 }
3702 __ lea(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
3703 __ call(kScratchRegister);
3704
3705 // Unlink this frame from the handler chain.
Steve Block44f0eee2011-05-26 01:26:41 +01003706 Operand handler_operand =
3707 masm->ExternalOperand(ExternalReference(Isolate::k_handler_address,
3708 isolate));
3709 __ pop(handler_operand);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003710 // Pop next_sp.
3711 __ addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
3712
3713#ifdef ENABLE_LOGGING_AND_PROFILING
Steve Block1e0659c2011-05-24 12:43:12 +01003714 // If current RBP value is the same as js_entry_sp value, it means that
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003715 // the current function is the outermost.
3716 __ movq(kScratchRegister, js_entry_sp);
3717 __ cmpq(rbp, Operand(kScratchRegister, 0));
3718 __ j(not_equal, &not_outermost_js_2);
3719 __ movq(Operand(kScratchRegister, 0), Immediate(0));
3720 __ bind(&not_outermost_js_2);
3721#endif
3722
3723 // Restore the top frame descriptor from the stack.
3724 __ bind(&exit);
Steve Block44f0eee2011-05-26 01:26:41 +01003725 {
3726 Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
3727 __ pop(c_entry_fp_operand);
3728 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003729
3730 // Restore callee-saved registers (X64 conventions).
3731 __ pop(rbx);
3732#ifdef _WIN64
3733 // Callee-saved in Win64 ABI, arguments/volatile in AMD64 ABI.
3734 __ pop(rsi);
3735 __ pop(rdi);
3736#endif
3737 __ pop(r15);
3738 __ pop(r14);
3739 __ pop(r13);
3740 __ pop(r12);
3741 __ addq(rsp, Immediate(2 * kPointerSize)); // remove markers
3742
3743 // Restore frame pointer and return.
3744 __ pop(rbp);
3745 __ ret(0);
3746}
3747
3748
3749void InstanceofStub::Generate(MacroAssembler* masm) {
3750 // Implements "value instanceof function" operator.
Steve Block44f0eee2011-05-26 01:26:41 +01003751 // Expected input state with no inline cache:
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003752 // rsp[0] : return address
3753 // rsp[1] : function pointer
3754 // rsp[2] : value
Steve Block44f0eee2011-05-26 01:26:41 +01003755 // Expected input state with an inline one-element cache:
3756 // rsp[0] : return address
3757 // rsp[1] : offset from return address to location of inline cache
3758 // rsp[2] : function pointer
3759 // rsp[3] : value
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003760 // Returns a bitwise zero to indicate that the value
3761 // is an instance of the function, and anything else to
3762 // indicate that the value is not an instance.
3763
Steve Block44f0eee2011-05-26 01:26:41 +01003764 static const int kOffsetToMapCheckValue = 5;
3765 static const int kOffsetToResultValue = 21;
3766 // The last 4 bytes of the instruction sequence
3767 // movq(rax, FieldOperand(rdi, HeapObject::kMapOffset)
3768 // Move(kScratchRegister, FACTORY->the_hole_value())
3769 // in front of the hole value address.
3770 static const unsigned int kWordBeforeMapCheckValue = 0xBA49FF78;
3771 // The last 4 bytes of the instruction sequence
3772 // __ j(not_equal, &cache_miss);
3773 // __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
3774 // before the offset of the hole value in the root array.
3775 static const unsigned int kWordBeforeResultValue = 0x458B4909;
3776 // Only the inline check flag is supported on X64.
3777 ASSERT(flags_ == kNoFlags || HasCallSiteInlineCheck());
3778 int extra_stack_space = HasCallSiteInlineCheck() ? kPointerSize : 0;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003779
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003780 // Get the object - go slow case if it's a smi.
3781 Label slow;
Steve Block44f0eee2011-05-26 01:26:41 +01003782
3783 __ movq(rax, Operand(rsp, 2 * kPointerSize + extra_stack_space));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003784 __ JumpIfSmi(rax, &slow);
3785
3786 // Check that the left hand is a JS object. Leave its map in rax.
3787 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rax);
3788 __ j(below, &slow);
3789 __ CmpInstanceType(rax, LAST_JS_OBJECT_TYPE);
3790 __ j(above, &slow);
3791
3792 // Get the prototype of the function.
Steve Block44f0eee2011-05-26 01:26:41 +01003793 __ movq(rdx, Operand(rsp, 1 * kPointerSize + extra_stack_space));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003794 // rdx is function, rax is map.
3795
Steve Block44f0eee2011-05-26 01:26:41 +01003796 // If there is a call site cache don't look in the global cache, but do the
3797 // real lookup and update the call site cache.
3798 if (!HasCallSiteInlineCheck()) {
3799 // Look up the function and the map in the instanceof cache.
3800 NearLabel miss;
3801 __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
3802 __ j(not_equal, &miss);
3803 __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex);
3804 __ j(not_equal, &miss);
3805 __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
3806 __ ret(2 * kPointerSize);
3807 __ bind(&miss);
3808 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003809
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003810 __ TryGetFunctionPrototype(rdx, rbx, &slow);
3811
3812 // Check that the function prototype is a JS object.
3813 __ JumpIfSmi(rbx, &slow);
3814 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, kScratchRegister);
3815 __ j(below, &slow);
3816 __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE);
3817 __ j(above, &slow);
3818
3819 // Register mapping:
3820 // rax is object map.
3821 // rdx is function.
3822 // rbx is function prototype.
Steve Block44f0eee2011-05-26 01:26:41 +01003823 if (!HasCallSiteInlineCheck()) {
3824 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
3825 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex);
3826 } else {
3827 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
3828 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
3829 __ movq(Operand(kScratchRegister, kOffsetToMapCheckValue), rax);
3830 if (FLAG_debug_code) {
3831 __ movl(rdi, Immediate(kWordBeforeMapCheckValue));
3832 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi);
3833 __ Assert(equal, "InstanceofStub unexpected call site cache.");
3834 }
3835 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003836
3837 __ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset));
3838
3839 // Loop through the prototype chain looking for the function prototype.
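  // In effect (illustrative):
  //   for (p = object_map->prototype; p != null; p = p->map->prototype)
  //     if (p == function_prototype) goto is_instance;
  //   goto is_not_instance;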
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003840 NearLabel loop, is_instance, is_not_instance;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003841 __ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex);
3842 __ bind(&loop);
3843 __ cmpq(rcx, rbx);
3844 __ j(equal, &is_instance);
3845 __ cmpq(rcx, kScratchRegister);
3846 // The code at is_not_instance assumes that kScratchRegister contains a
3847 // non-zero GCable value (the null object in this case).
3848 __ j(equal, &is_not_instance);
3849 __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
3850 __ movq(rcx, FieldOperand(rcx, Map::kPrototypeOffset));
3851 __ jmp(&loop);
3852
3853 __ bind(&is_instance);
Steve Block44f0eee2011-05-26 01:26:41 +01003854 if (!HasCallSiteInlineCheck()) {
3855 __ xorl(rax, rax);
3856 // Store bitwise zero in the cache. This is a Smi in GC terms.
3857 STATIC_ASSERT(kSmiTag == 0);
3858 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
3859 } else {
3860 // Store offset of true in the root array at the inline check site.
3861 ASSERT((Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias
3862 == 0xB0 - 0x100);
3863 __ movl(rax, Immediate(0xB0)); // TrueValue is at -10 * kPointerSize.
3864 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
3865 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
3866 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
3867 if (FLAG_debug_code) {
3868 __ movl(rax, Immediate(kWordBeforeResultValue));
3869 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
3870 __ Assert(equal, "InstanceofStub unexpected call site cache.");
3871 }
3872 __ xorl(rax, rax);
3873 }
3874 __ ret(2 * kPointerSize + extra_stack_space);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003875
3876 __ bind(&is_not_instance);
Steve Block44f0eee2011-05-26 01:26:41 +01003877 if (!HasCallSiteInlineCheck()) {
3878 // We have to store a non-zero value in the cache.
3879 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex);
3880 } else {
3881 // Store offset of false in the root array at the inline check site.
3882 ASSERT((Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias
3883 == 0xB8 - 0x100);
3884 __ movl(rax, Immediate(0xB8)); // FalseValue is at -9 * kPointerSize.
3885 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
3886 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
3887 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
3888 if (FLAG_debug_code) {
3889 __ movl(rax, Immediate(kWordBeforeResultValue));
3890 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
3891 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
3892 }
3893 }
3894 __ ret(2 * kPointerSize + extra_stack_space);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003895
3896 // Slow-case: Go through the JavaScript implementation.
3897 __ bind(&slow);
Steve Block44f0eee2011-05-26 01:26:41 +01003898 if (HasCallSiteInlineCheck()) {
3899 // Remove extra value from the stack.
3900 __ pop(rcx);
3901 __ pop(rax);
3902 __ push(rcx);
3903 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003904 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
3905}
3906
3907
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003908// Passing arguments in registers is not supported.
3909Register InstanceofStub::left() { return no_reg; }
Steve Block1e0659c2011-05-24 12:43:12 +01003910
3911
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003912Register InstanceofStub::right() { return no_reg; }
Steve Block1e0659c2011-05-24 12:43:12 +01003913
3914
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003915int CompareStub::MinorKey() {
3916 // Encode the parameters in a unique 16 bit value. To avoid duplicate
3917 // stubs the never NaN NaN condition is only taken into account if the
3918 // condition is equals.
3919 ASSERT(static_cast<unsigned>(cc_) < (1 << 12));
3920 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
3921 return ConditionField::encode(static_cast<unsigned>(cc_))
3922 | RegisterField::encode(false) // lhs_ and rhs_ are not used
3923 | StrictField::encode(strict_)
3924 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false)
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003925 | IncludeNumberCompareField::encode(include_number_compare_)
3926 | IncludeSmiCompareField::encode(include_smi_compare_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003927}
3928
3929
3930// Unfortunately you have to run without snapshots to see most of these
3931// names in the profile since most compare stubs end up in the snapshot.
3932const char* CompareStub::GetName() {
3933 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
3934
3935 if (name_ != NULL) return name_;
3936 const int kMaxNameLength = 100;
Steve Block44f0eee2011-05-26 01:26:41 +01003937 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
3938 kMaxNameLength);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003939 if (name_ == NULL) return "OOM";
3940
3941 const char* cc_name;
3942 switch (cc_) {
3943 case less: cc_name = "LT"; break;
3944 case greater: cc_name = "GT"; break;
3945 case less_equal: cc_name = "LE"; break;
3946 case greater_equal: cc_name = "GE"; break;
3947 case equal: cc_name = "EQ"; break;
3948 case not_equal: cc_name = "NE"; break;
3949 default: cc_name = "UnknownCondition"; break;
3950 }
3951
3952 const char* strict_name = "";
3953 if (strict_ && (cc_ == equal || cc_ == not_equal)) {
3954 strict_name = "_STRICT";
3955 }
3956
3957 const char* never_nan_nan_name = "";
3958 if (never_nan_nan_ && (cc_ == equal || cc_ == not_equal)) {
3959 never_nan_nan_name = "_NO_NAN";
3960 }
3961
3962 const char* include_number_compare_name = "";
3963 if (!include_number_compare_) {
3964 include_number_compare_name = "_NO_NUMBER";
3965 }
3966
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003967 const char* include_smi_compare_name = "";
3968 if (!include_smi_compare_) {
3969 include_smi_compare_name = "_NO_SMI";
3970 }
3971
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003972 OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
3973 "CompareStub_%s%s%s%s",
3974 cc_name,
3975 strict_name,
3976 never_nan_nan_name,
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003977 include_number_compare_name,
3978 include_smi_compare_name);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003979 return name_;
3980}
3981
3982
3983// -------------------------------------------------------------------------
3984// StringCharCodeAtGenerator
3985
3986void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
3987 Label flat_string;
3988 Label ascii_string;
3989 Label got_char_code;
3990
3991 // If the receiver is a smi trigger the non-string case.
3992 __ JumpIfSmi(object_, receiver_not_string_);
3993
3994 // Fetch the instance type of the receiver into result register.
3995 __ movq(result_, FieldOperand(object_, HeapObject::kMapOffset));
3996 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
3997 // If the receiver is not a string trigger the non-string case.
3998 __ testb(result_, Immediate(kIsNotStringMask));
3999 __ j(not_zero, receiver_not_string_);
4000
4001 // If the index is non-smi trigger the non-smi case.
4002 __ JumpIfNotSmi(index_, &index_not_smi_);
4003
4004 // Put smi-tagged index into scratch register.
4005 __ movq(scratch_, index_);
4006 __ bind(&got_smi_index_);
4007
4008 // Check for index out of range.
4009 __ SmiCompare(scratch_, FieldOperand(object_, String::kLengthOffset));
4010 __ j(above_equal, index_out_of_range_);
4011
4012 // We need special handling for non-flat strings.
4013 STATIC_ASSERT(kSeqStringTag == 0);
4014 __ testb(result_, Immediate(kStringRepresentationMask));
4015 __ j(zero, &flat_string);
4016
4017 // Handle non-flat strings.
4018 __ testb(result_, Immediate(kIsConsStringMask));
4019 __ j(zero, &call_runtime_);
4020
4021 // ConsString.
4022 // Check whether the right hand side is the empty string (i.e. if
4023 // this is really a flat string in a cons string). If that is not
4024 // the case we would rather go to the runtime system now to flatten
4025 // the string.
4026 __ CompareRoot(FieldOperand(object_, ConsString::kSecondOffset),
4027 Heap::kEmptyStringRootIndex);
4028 __ j(not_equal, &call_runtime_);
4029 // Get the first of the two strings and load its instance type.
4030 __ movq(object_, FieldOperand(object_, ConsString::kFirstOffset));
4031 __ movq(result_, FieldOperand(object_, HeapObject::kMapOffset));
4032 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
4033 // If the first cons component is also non-flat, then go to runtime.
4034 STATIC_ASSERT(kSeqStringTag == 0);
4035 __ testb(result_, Immediate(kStringRepresentationMask));
4036 __ j(not_zero, &call_runtime_);
4037
4038 // Check for 1-byte or 2-byte string.
4039 __ bind(&flat_string);
4040 STATIC_ASSERT(kAsciiStringTag != 0);
4041 __ testb(result_, Immediate(kStringEncodingMask));
4042 __ j(not_zero, &ascii_string);
4043
4044 // 2-byte string.
4045 // Load the 2-byte character code into the result register.
4046 __ SmiToInteger32(scratch_, scratch_);
4047 __ movzxwl(result_, FieldOperand(object_,
4048 scratch_, times_2,
4049 SeqTwoByteString::kHeaderSize));
4050 __ jmp(&got_char_code);
4051
4052 // ASCII string.
4053 // Load the byte into the result register.
4054 __ bind(&ascii_string);
4055 __ SmiToInteger32(scratch_, scratch_);
4056 __ movzxbl(result_, FieldOperand(object_,
4057 scratch_, times_1,
4058 SeqAsciiString::kHeaderSize));
4059 __ bind(&got_char_code);
4060 __ Integer32ToSmi(result_, result_);
4061 __ bind(&exit_);
4062}
4063
4064
4065void StringCharCodeAtGenerator::GenerateSlow(
4066 MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
4067 __ Abort("Unexpected fallthrough to CharCodeAt slow case");
4068
4069 // Index is not a smi.
4070 __ bind(&index_not_smi_);
4071 // If index is a heap number, try converting it to an integer.
Steve Block44f0eee2011-05-26 01:26:41 +01004072 __ CheckMap(index_, FACTORY->heap_number_map(), index_not_number_, true);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004073 call_helper.BeforeCall(masm);
4074 __ push(object_);
4075 __ push(index_);
4076 __ push(index_); // Consumed by runtime conversion function.
4077 if (index_flags_ == STRING_INDEX_IS_NUMBER) {
4078 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
4079 } else {
4080 ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
4081 // NumberToSmi discards numbers that are not exact integers.
4082 __ CallRuntime(Runtime::kNumberToSmi, 1);
4083 }
4084 if (!scratch_.is(rax)) {
4085 // Save the conversion result before the pop instructions below
4086 // have a chance to overwrite it.
4087 __ movq(scratch_, rax);
4088 }
4089 __ pop(index_);
4090 __ pop(object_);
4091 // Reload the instance type.
4092 __ movq(result_, FieldOperand(object_, HeapObject::kMapOffset));
4093 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
4094 call_helper.AfterCall(masm);
4095 // If index is still not a smi, it must be out of range.
4096 __ JumpIfNotSmi(scratch_, index_out_of_range_);
4097 // Otherwise, return to the fast path.
4098 __ jmp(&got_smi_index_);
4099
4100 // Call runtime. We get here when the receiver is a string and the
4101 // index is a number, but the code of getting the actual character
4102 // is too complex (e.g., when the string needs to be flattened).
4103 __ bind(&call_runtime_);
4104 call_helper.BeforeCall(masm);
4105 __ push(object_);
4106 __ push(index_);
4107 __ CallRuntime(Runtime::kStringCharCodeAt, 2);
4108 if (!result_.is(rax)) {
4109 __ movq(result_, rax);
4110 }
4111 call_helper.AfterCall(masm);
4112 __ jmp(&exit_);
4113
4114 __ Abort("Unexpected fallthrough from CharCodeAt slow case");
4115}
4116
4117
4118// -------------------------------------------------------------------------
4119// StringCharFromCodeGenerator
4120
4121void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
4122 // Fast case of Heap::LookupSingleCharacterStringFromCode.
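  // In effect: if code is a smi in [0, kMaxAsciiCharCode], return the cached
  // single-character string, unless the cache entry is undefined (slow case).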
4123 __ JumpIfNotSmi(code_, &slow_case_);
4124 __ SmiCompare(code_, Smi::FromInt(String::kMaxAsciiCharCode));
4125 __ j(above, &slow_case_);
4126
4127 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
4128 SmiIndex index = masm->SmiToIndex(kScratchRegister, code_, kPointerSizeLog2);
4129 __ movq(result_, FieldOperand(result_, index.reg, index.scale,
4130 FixedArray::kHeaderSize));
4131 __ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
4132 __ j(equal, &slow_case_);
4133 __ bind(&exit_);
4134}
4135
4136
4137void StringCharFromCodeGenerator::GenerateSlow(
4138 MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
4139 __ Abort("Unexpected fallthrough to CharFromCode slow case");
4140
4141 __ bind(&slow_case_);
4142 call_helper.BeforeCall(masm);
4143 __ push(code_);
4144 __ CallRuntime(Runtime::kCharFromCode, 1);
4145 if (!result_.is(rax)) {
4146 __ movq(result_, rax);
4147 }
4148 call_helper.AfterCall(masm);
4149 __ jmp(&exit_);
4150
4151 __ Abort("Unexpected fallthrough from CharFromCode slow case");
4152}
4153
4154
4155// -------------------------------------------------------------------------
4156// StringCharAtGenerator
4157
4158void StringCharAtGenerator::GenerateFast(MacroAssembler* masm) {
4159 char_code_at_generator_.GenerateFast(masm);
4160 char_from_code_generator_.GenerateFast(masm);
4161}
4162
4163
4164void StringCharAtGenerator::GenerateSlow(
4165 MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
4166 char_code_at_generator_.GenerateSlow(masm, call_helper);
4167 char_from_code_generator_.GenerateSlow(masm, call_helper);
4168}
4169
4170
4171void StringAddStub::Generate(MacroAssembler* masm) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004172 Label string_add_runtime, call_builtin;
4173 Builtins::JavaScript builtin_id = Builtins::ADD;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004174
4175 // Load the two arguments.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004176 __ movq(rax, Operand(rsp, 2 * kPointerSize)); // First argument (left).
4177 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); // Second argument (right).
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004178
4179 // Make sure that both arguments are strings if not known in advance.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004180 if (flags_ == NO_STRING_ADD_FLAGS) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004181 Condition is_smi;
4182 is_smi = masm->CheckSmi(rax);
4183 __ j(is_smi, &string_add_runtime);
4184 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, r8);
4185 __ j(above_equal, &string_add_runtime);
4186
4187 // First argument is a string, test second.
4188 is_smi = masm->CheckSmi(rdx);
4189 __ j(is_smi, &string_add_runtime);
4190 __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, r9);
4191 __ j(above_equal, &string_add_runtime);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004192 } else {
4193 // Here at least one of the arguments is definitely a string.
4194 // We convert the one that is not known to be a string.
4195 if ((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) == 0) {
4196 ASSERT((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) != 0);
4197 GenerateConvertArgument(masm, 2 * kPointerSize, rax, rbx, rcx, rdi,
4198 &call_builtin);
4199 builtin_id = Builtins::STRING_ADD_RIGHT;
4200 } else if ((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) == 0) {
4201 ASSERT((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) != 0);
4202 GenerateConvertArgument(masm, 1 * kPointerSize, rdx, rbx, rcx, rdi,
4203 &call_builtin);
4204 builtin_id = Builtins::STRING_ADD_LEFT;
4205 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004206 }
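
  // Flag semantics, as a sketch: NO_STRING_ADD_FLAGS means neither operand
  // is known to be a string, so both are type-checked here and anything else
  // goes to the generic runtime add. A NO_STRING_CHECK_*_IN_STUB flag means
  // the code generator already proved that side is a string, so only the
  // other side is converted, and a failed conversion dispatches to the
  // corresponding STRING_ADD_LEFT/RIGHT builtin.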

  // Both arguments are strings.
  // rax: first string
  // rdx: second string
  // Check if either of the strings is empty. In that case return the other.
  NearLabel second_not_zero_length, both_not_zero_length;
  __ movq(rcx, FieldOperand(rdx, String::kLengthOffset));
  __ SmiTest(rcx);
  __ j(not_zero, &second_not_zero_length);
  // Second string is empty, result is first string which is already in rax.
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);
  __ bind(&second_not_zero_length);
  __ movq(rbx, FieldOperand(rax, String::kLengthOffset));
  __ SmiTest(rbx);
  __ j(not_zero, &both_not_zero_length);
  // First string is empty, result is second string which is in rdx.
  __ movq(rax, rdx);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  // Both strings are non-empty.
  // rax: first string
  // rbx: length of first string
  // rcx: length of second string
  // rdx: second string
  // r8: map of first string (if flags_ == NO_STRING_ADD_FLAGS)
  // r9: map of second string (if flags_ == NO_STRING_ADD_FLAGS)
  Label string_add_flat_result, longer_than_two;
  __ bind(&both_not_zero_length);

  // If arguments were known to be strings, maps are not loaded to r8 and r9
  // by the code above.
  if (flags_ != NO_STRING_ADD_FLAGS) {
    __ movq(r8, FieldOperand(rax, HeapObject::kMapOffset));
    __ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset));
  }
  // Get the instance types of the two strings as they will be needed soon.
  __ movzxbl(r8, FieldOperand(r8, Map::kInstanceTypeOffset));
  __ movzxbl(r9, FieldOperand(r9, Map::kInstanceTypeOffset));

  // Look at the length of the result of adding the two strings.
  STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue / 2);
  __ SmiAdd(rbx, rbx, rcx);
  // Use the symbol table when adding two one-character strings, as it
  // helps later optimizations to return a symbol here.
  __ SmiCompare(rbx, Smi::FromInt(2));
  __ j(not_equal, &longer_than_two);

  // Check that both strings are non-external ascii strings.
  __ JumpIfBothInstanceTypesAreNotSequentialAscii(r8, r9, rbx, rcx,
                                                  &string_add_runtime);

  // Get the two characters forming the new string.
  __ movzxbq(rbx, FieldOperand(rax, SeqAsciiString::kHeaderSize));
  __ movzxbq(rcx, FieldOperand(rdx, SeqAsciiString::kHeaderSize));

  // Try to look up the two-character string in the symbol table. If it is
  // not found just allocate a new one.
  Label make_two_character_string, make_flat_ascii_string;
  StringHelper::GenerateTwoCharacterSymbolTableProbe(
      masm, rbx, rcx, r14, r11, rdi, r15, &make_two_character_string);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  __ bind(&make_two_character_string);
  __ Set(rbx, 2);
  __ jmp(&make_flat_ascii_string);

  __ bind(&longer_than_two);
  // Check if resulting string will be flat.
  __ SmiCompare(rbx, Smi::FromInt(String::kMinNonFlatLength));
  __ j(below, &string_add_flat_result);
  // Handle exceptionally long strings in the runtime system.
  STATIC_ASSERT((String::kMaxLength & 0x80000000) == 0);
  __ SmiCompare(rbx, Smi::FromInt(String::kMaxLength));
  __ j(above, &string_add_runtime);

  // If result is not supposed to be flat, allocate a cons string object. If
  // both strings are ascii the result is an ascii cons string.
  // rax: first string
  // rbx: length of resulting flat string
  // rdx: second string
  // r8: instance type of first string
  // r9: instance type of second string
  Label non_ascii, allocated, ascii_data;
  __ movl(rcx, r8);
  __ and_(rcx, r9);
  STATIC_ASSERT(kStringEncodingMask == kAsciiStringTag);
  __ testl(rcx, Immediate(kAsciiStringTag));
  __ j(zero, &non_ascii);
  __ bind(&ascii_data);
  // Allocate an ascii cons string.
  __ AllocateAsciiConsString(rcx, rdi, no_reg, &string_add_runtime);
  __ bind(&allocated);
  // Fill the fields of the cons string.
  __ movq(FieldOperand(rcx, ConsString::kLengthOffset), rbx);
  __ movq(FieldOperand(rcx, ConsString::kHashFieldOffset),
          Immediate(String::kEmptyHashField));
  __ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax);
  __ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx);
  __ movq(rax, rcx);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);
  __ bind(&non_ascii);
  // At least one of the strings is two-byte. Check whether it happens
  // to contain only ascii characters.
  // rcx: first instance type AND second instance type.
  // r8: first instance type.
  // r9: second instance type.
  __ testb(rcx, Immediate(kAsciiDataHintMask));
  __ j(not_zero, &ascii_data);
  __ xor_(r8, r9);
  STATIC_ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0);
  __ andb(r8, Immediate(kAsciiStringTag | kAsciiDataHintTag));
  __ cmpb(r8, Immediate(kAsciiStringTag | kAsciiDataHintTag));
  __ j(equal, &ascii_data);
  // Allocate a two byte cons string.
  __ AllocateConsString(rcx, rdi, no_reg, &string_add_runtime);
  __ jmp(&allocated);
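
  // A sketch of the encoding check just above: after the xor, bits are set
  // only where the two instance types disagree. Matching the pattern
  // kAsciiStringTag | kAsciiDataHintTag therefore means one operand is an
  // ascii string and the other is a two-byte string carrying the ascii-data
  // hint, which is the one remaining mix that still allows an ascii result.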

  // Handle creating a flat result. First check that both strings are not
  // external strings.
  // rax: first string
  // rbx: length of resulting flat string as smi
  // rdx: second string
  // r8: instance type of first string
  // r9: instance type of second string
  __ bind(&string_add_flat_result);
  __ SmiToInteger32(rbx, rbx);
  __ movl(rcx, r8);
  __ and_(rcx, Immediate(kStringRepresentationMask));
  __ cmpl(rcx, Immediate(kExternalStringTag));
  __ j(equal, &string_add_runtime);
  __ movl(rcx, r9);
  __ and_(rcx, Immediate(kStringRepresentationMask));
  __ cmpl(rcx, Immediate(kExternalStringTag));
  __ j(equal, &string_add_runtime);
  // Now check if both strings are ascii strings.
  // rax: first string
  // rbx: length of resulting flat string
  // rdx: second string
  // r8: instance type of first string
  // r9: instance type of second string
  Label non_ascii_string_add_flat_result;
  STATIC_ASSERT(kStringEncodingMask == kAsciiStringTag);
  __ testl(r8, Immediate(kAsciiStringTag));
  __ j(zero, &non_ascii_string_add_flat_result);
  __ testl(r9, Immediate(kAsciiStringTag));
  __ j(zero, &string_add_runtime);

  __ bind(&make_flat_ascii_string);
  // Both strings are ascii strings. As they are short they are both flat.
  __ AllocateAsciiString(rcx, rbx, rdi, r14, r11, &string_add_runtime);
  // rcx: result string
  __ movq(rbx, rcx);
  // Locate first character of result.
  __ addq(rcx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // Locate first character of first argument.
  __ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset));
  __ addq(rax, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // rax: first char of first argument
  // rbx: result string
  // rcx: first character of result
  // rdx: second string
  // rdi: length of first argument
  StringHelper::GenerateCopyCharacters(masm, rcx, rax, rdi, true);
  // Locate first character of second argument.
  __ SmiToInteger32(rdi, FieldOperand(rdx, String::kLengthOffset));
  __ addq(rdx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // rbx: result string
  // rcx: next character of result
  // rdx: first char of second argument
  // rdi: length of second argument
  StringHelper::GenerateCopyCharacters(masm, rcx, rdx, rdi, true);
  __ movq(rax, rbx);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  // Handle creating a flat two byte result.
  // rax: first string - known to be two byte
  // rbx: length of resulting flat string
  // rdx: second string
  // r8: instance type of first string
  // r9: instance type of second string
  __ bind(&non_ascii_string_add_flat_result);
  __ and_(r9, Immediate(kAsciiStringTag));
  __ j(not_zero, &string_add_runtime);
  // Both strings are two byte strings. As they are short they are both
  // flat.
  __ AllocateTwoByteString(rcx, rbx, rdi, r14, r11, &string_add_runtime);
  // rcx: result string
  __ movq(rbx, rcx);
  // Locate first character of result.
  __ addq(rcx, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // Locate first character of first argument.
  __ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset));
  __ addq(rax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // rax: first char of first argument
  // rbx: result string
  // rcx: first character of result
  // rdx: second argument
  // rdi: length of first argument
  StringHelper::GenerateCopyCharacters(masm, rcx, rax, rdi, false);
  // Locate first character of second argument.
  __ SmiToInteger32(rdi, FieldOperand(rdx, String::kLengthOffset));
  __ addq(rdx, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // rbx: result string
  // rcx: next character of result
  // rdx: first char of second argument
  // rdi: length of second argument
  StringHelper::GenerateCopyCharacters(masm, rcx, rdx, rdi, false);
  __ movq(rax, rbx);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  // Just jump to runtime to add the two strings.
  __ bind(&string_add_runtime);
  __ TailCallRuntime(Runtime::kStringAdd, 2, 1);

  if (call_builtin.is_linked()) {
    __ bind(&call_builtin);
    __ InvokeBuiltin(builtin_id, JUMP_FUNCTION);
  }
}


void StringAddStub::GenerateConvertArgument(MacroAssembler* masm,
                                            int stack_offset,
                                            Register arg,
                                            Register scratch1,
                                            Register scratch2,
                                            Register scratch3,
                                            Label* slow) {
  // First check if the argument is already a string.
  Label not_string, done;
  __ JumpIfSmi(arg, &not_string);
  __ CmpObjectType(arg, FIRST_NONSTRING_TYPE, scratch1);
  __ j(below, &done);

  // Check the number to string cache.
  Label not_cached;
  __ bind(&not_string);
  // Puts the cached result into scratch1.
  NumberToStringStub::GenerateLookupNumberStringCache(masm,
                                                      arg,
                                                      scratch1,
                                                      scratch2,
                                                      scratch3,
                                                      false,
                                                      &not_cached);
  __ movq(arg, scratch1);
  __ movq(Operand(rsp, stack_offset), arg);
  __ jmp(&done);

  // Check if the argument is a safe string wrapper.
  __ bind(&not_cached);
  __ JumpIfSmi(arg, slow);
  __ CmpObjectType(arg, JS_VALUE_TYPE, scratch1);  // map -> scratch1.
  __ j(not_equal, slow);
  __ testb(FieldOperand(scratch1, Map::kBitField2Offset),
           Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
  __ j(zero, slow);
  __ movq(arg, FieldOperand(arg, JSValue::kValueOffset));
  __ movq(Operand(rsp, stack_offset), arg);

  __ bind(&done);
}
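
// A sketch of the conversion policy implemented above: a value that is
// already a string is used as-is; a number is converted through the
// number-to-string cache; a String wrapper whose valueOf is known not to
// have been overridden is unwrapped in place. Anything else, including a
// cache miss, bails out through the slow label to the generic builtin.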


void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
                                          Register dest,
                                          Register src,
                                          Register count,
                                          bool ascii) {
  Label loop;
  __ bind(&loop);
  // This loop just copies one character at a time, as it is only used for very
  // short strings.
  if (ascii) {
    __ movb(kScratchRegister, Operand(src, 0));
    __ movb(Operand(dest, 0), kScratchRegister);
    __ incq(src);
    __ incq(dest);
  } else {
    __ movzxwl(kScratchRegister, Operand(src, 0));
    __ movw(Operand(dest, 0), kScratchRegister);
    __ addq(src, Immediate(2));
    __ addq(dest, Immediate(2));
  }
  __ decl(count);
  __ j(not_zero, &loop);
}
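
// Equivalent C for the loop above (a sketch; the callers guarantee that
// count is greater than zero):
//   do {
//     *dest++ = *src++;  // char for ascii, uc16 for two-byte.
//   } while (--count != 0);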


void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
                                             Register dest,
                                             Register src,
                                             Register count,
                                             bool ascii) {
  // Copy characters using rep movs of quadwords. Copy remaining characters
  // individually after running rep movs.
  // Count is positive int32, dest and src are character pointers.
  ASSERT(dest.is(rdi));  // rep movs destination
  ASSERT(src.is(rsi));  // rep movs source
  ASSERT(count.is(rcx));  // rep movs count

  // Nothing to do for zero characters.
  NearLabel done;
  __ testl(count, count);
  __ j(zero, &done);

  // Make count the number of bytes to copy.
  if (!ascii) {
    STATIC_ASSERT(2 == sizeof(uc16));
    __ addl(count, count);
  }

  // Don't enter the rep movs if there are fewer than 8 bytes to copy.
  NearLabel last_bytes;
  __ testl(count, Immediate(~7));
  __ j(zero, &last_bytes);

  // Copy from rsi to rdi using the rep movs instruction.
  __ movl(kScratchRegister, count);
  __ shr(count, Immediate(3));  // Number of quadwords to copy.
  __ repmovsq();

  // Find number of bytes left.
  __ movl(count, kScratchRegister);
  __ and_(count, Immediate(7));

  // Check if there are more bytes to copy.
  __ bind(&last_bytes);
  __ testl(count, count);
  __ j(zero, &done);

  // Copy remaining characters.
  Label loop;
  __ bind(&loop);
  __ movb(kScratchRegister, Operand(src, 0));
  __ movb(Operand(dest, 0), kScratchRegister);
  __ incq(src);
  __ incq(dest);
  __ decl(count);
  __ j(not_zero, &loop);

  __ bind(&done);
}
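
// The copy strategy above, as a C sketch (n = count * character size in
// bytes): copy n / 8 quadwords with rep movsq, then copy the remaining
// n % 8 bytes one at a time.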


void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
                                                        Register c1,
                                                        Register c2,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3,
                                                        Register scratch4,
                                                        Label* not_found) {
  // Register scratch3 is the general scratch register in this function.
  Register scratch = scratch3;

  // Make sure that both characters are not digits, as such strings have a
  // different hash algorithm. Don't try to look for these in the symbol table.
  NearLabel not_array_index;
  __ leal(scratch, Operand(c1, -'0'));
  __ cmpl(scratch, Immediate(static_cast<int>('9' - '0')));
  __ j(above, &not_array_index);
  __ leal(scratch, Operand(c2, -'0'));
  __ cmpl(scratch, Immediate(static_cast<int>('9' - '0')));
  __ j(below_equal, not_found);

  __ bind(&not_array_index);
  // Calculate the two character string hash.
  Register hash = scratch1;
  GenerateHashInit(masm, hash, c1, scratch);
  GenerateHashAddCharacter(masm, hash, c2, scratch);
  GenerateHashGetHash(masm, hash, scratch);

  // Collect the two characters in a register.
  Register chars = c1;
  __ shl(c2, Immediate(kBitsPerByte));
  __ orl(chars, c2);

  // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
  // hash: hash of two character string.

  // Load the symbol table.
  Register symbol_table = c2;
  __ LoadRoot(symbol_table, Heap::kSymbolTableRootIndex);

  // Calculate capacity mask from the symbol table capacity.
  Register mask = scratch2;
  __ SmiToInteger32(mask,
                    FieldOperand(symbol_table, SymbolTable::kCapacityOffset));
  __ decl(mask);
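  // The symbol table capacity is a power of two, so capacity - 1 acts as a
  // bit mask that reduces each probe position modulo the capacity.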

  Register map = scratch4;

  // Registers
  // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
  // hash: hash of two character string (32-bit int)
  // symbol_table: symbol table
  // mask: capacity mask (32-bit int)
  // map: -
  // scratch: -

  // Perform a number of probes in the symbol table.
  static const int kProbes = 4;
  Label found_in_symbol_table;
  Label next_probe[kProbes];
  for (int i = 0; i < kProbes; i++) {
    // Calculate entry in symbol table.
    __ movl(scratch, hash);
    if (i > 0) {
      __ addl(scratch, Immediate(SymbolTable::GetProbeOffset(i)));
    }
    __ andl(scratch, mask);

    // Load the entry from the symbol table.
    Register candidate = scratch;  // Scratch register contains candidate.
    STATIC_ASSERT(SymbolTable::kEntrySize == 1);
    __ movq(candidate,
            FieldOperand(symbol_table,
                         scratch,
                         times_pointer_size,
                         SymbolTable::kElementsStartOffset));

    // If entry is undefined no string with this hash can be found.
    NearLabel is_string;
    __ CmpObjectType(candidate, ODDBALL_TYPE, map);
    __ j(not_equal, &is_string);

    __ CompareRoot(candidate, Heap::kUndefinedValueRootIndex);
    __ j(equal, not_found);
    // Must be null (deleted entry).
    __ jmp(&next_probe[i]);

    __ bind(&is_string);

    // If length is not 2 the string is not a candidate.
    __ SmiCompare(FieldOperand(candidate, String::kLengthOffset),
                  Smi::FromInt(2));
    __ j(not_equal, &next_probe[i]);

    // We use kScratchRegister as a temporary register in the assumption that
    // JumpIfInstanceTypeIsNotSequentialAscii does not use it implicitly.
    Register temp = kScratchRegister;

    // Check that the candidate is a non-external ascii string.
    __ movzxbl(temp, FieldOperand(map, Map::kInstanceTypeOffset));
    __ JumpIfInstanceTypeIsNotSequentialAscii(
        temp, temp, &next_probe[i]);

    // Check if the two characters match.
    __ movl(temp, FieldOperand(candidate, SeqAsciiString::kHeaderSize));
    __ andl(temp, Immediate(0x0000ffff));
    __ cmpl(chars, temp);
    __ j(equal, &found_in_symbol_table);
    __ bind(&next_probe[i]);
  }

  // No matching 2 character string found by probing.
  __ jmp(not_found);

  // Scratch register contains result when we fall through to here.
  Register result = scratch;
  __ bind(&found_in_symbol_table);
  if (!result.is(rax)) {
    __ movq(rax, result);
  }
}


void StringHelper::GenerateHashInit(MacroAssembler* masm,
                                    Register hash,
                                    Register character,
                                    Register scratch) {
  // hash = character + (character << 10);
  __ movl(hash, character);
  __ shll(hash, Immediate(10));
  __ addl(hash, character);
  // hash ^= hash >> 6;
  __ movl(scratch, hash);
  __ sarl(scratch, Immediate(6));
  __ xorl(hash, scratch);
}


void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
                                            Register hash,
                                            Register character,
                                            Register scratch) {
  // hash += character;
  __ addl(hash, character);
  // hash += hash << 10;
  __ movl(scratch, hash);
  __ shll(scratch, Immediate(10));
  __ addl(hash, scratch);
  // hash ^= hash >> 6;
  __ movl(scratch, hash);
  __ sarl(scratch, Immediate(6));
  __ xorl(hash, scratch);
}


void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
                                       Register hash,
                                       Register scratch) {
  // hash += hash << 3;
  __ leal(hash, Operand(hash, hash, times_8, 0));
  // hash ^= hash >> 11;
  __ movl(scratch, hash);
  __ sarl(scratch, Immediate(11));
  __ xorl(hash, scratch);
  // hash += hash << 15;
  __ movl(scratch, hash);
  __ shll(scratch, Immediate(15));
  __ addl(hash, scratch);

  // if (hash == 0) hash = 27;
  Label hash_not_zero;
  __ j(not_zero, &hash_not_zero);
  __ movl(hash, Immediate(27));
  __ bind(&hash_not_zero);
}
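
// Taken together, the three helpers above compute a Jenkins-style
// one-at-a-time hash. A C sketch of the whole pipeline (zero is remapped
// at the end, presumably because a zero hash value is reserved):
//   uint32_t hash = 0;
//   for (each character c) {
//     hash += c;
//     hash += hash << 10;
//     hash ^= hash >> 6;
//   }
//   hash += hash << 3;
//   hash ^= hash >> 11;
//   hash += hash << 15;
//   if (hash == 0) hash = 27;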


void SubStringStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  // rsp[0]: return address
  // rsp[8]: to
  // rsp[16]: from
  // rsp[24]: string

  const int kToOffset = 1 * kPointerSize;
  const int kFromOffset = kToOffset + kPointerSize;
  const int kStringOffset = kFromOffset + kPointerSize;
  const int kArgumentsSize = (kStringOffset + kPointerSize) - kToOffset;

  // Make sure first argument is a string.
  __ movq(rax, Operand(rsp, kStringOffset));
  STATIC_ASSERT(kSmiTag == 0);
  __ testl(rax, Immediate(kSmiTagMask));
  __ j(zero, &runtime);
  Condition is_string = masm->IsObjectStringType(rax, rbx, rbx);
  __ j(NegateCondition(is_string), &runtime);

  // rax: string
  // rbx: instance type
  // Calculate length of sub string using the smi values.
  Label result_longer_than_two;
  __ movq(rcx, Operand(rsp, kToOffset));
  __ movq(rdx, Operand(rsp, kFromOffset));
  __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime);

  __ SmiSub(rcx, rcx, rdx);  // Overflow doesn't happen.
  __ cmpq(FieldOperand(rax, String::kLengthOffset), rcx);
  Label return_rax;
  __ j(equal, &return_rax);
  // Special handling of sub-strings of length 1 and 2. One character strings
  // are handled in the runtime system (looked up in the single character
  // cache). Two character strings are looked up in the symbol table.
  __ SmiToInteger32(rcx, rcx);
  __ cmpl(rcx, Immediate(2));
  __ j(greater, &result_longer_than_two);
  __ j(less, &runtime);

  // Sub string of length 2 requested.
  // rax: string
  // rbx: instance type
  // rcx: sub string length (value is 2)
  // rdx: from index (smi)
  __ JumpIfInstanceTypeIsNotSequentialAscii(rbx, rbx, &runtime);

  // Get the two characters forming the sub string.
  __ SmiToInteger32(rdx, rdx);  // From index is no longer smi.
  __ movzxbq(rbx, FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize));
  __ movzxbq(rcx,
             FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize + 1));

  // Try to look up the two character string in the symbol table.
  Label make_two_character_string;
  StringHelper::GenerateTwoCharacterSymbolTableProbe(
      masm, rbx, rcx, rax, rdx, rdi, r14, &make_two_character_string);
  __ ret(3 * kPointerSize);

  __ bind(&make_two_character_string);
  // Set up registers for allocating the two character string.
  __ movq(rax, Operand(rsp, kStringOffset));
  __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
  __ Set(rcx, 2);

  __ bind(&result_longer_than_two);

  // rax: string
  // rbx: instance type
  // rcx: result string length
  // Check for flat ascii string.
  Label non_ascii_flat;
  __ JumpIfInstanceTypeIsNotSequentialAscii(rbx, rbx, &non_ascii_flat);

  // Allocate the result.
  __ AllocateAsciiString(rax, rcx, rbx, rdx, rdi, &runtime);

  // rax: result string
  // rcx: result string length
  __ movq(rdx, rsi);  // rsi used by following code.
  // Locate first character of result.
  __ lea(rdi, FieldOperand(rax, SeqAsciiString::kHeaderSize));
  // Load string argument and locate character of sub string start.
  __ movq(rsi, Operand(rsp, kStringOffset));
  __ movq(rbx, Operand(rsp, kFromOffset));
  {
    SmiIndex smi_as_index = masm->SmiToIndex(rbx, rbx, times_1);
    __ lea(rsi, Operand(rsi, smi_as_index.reg, smi_as_index.scale,
                        SeqAsciiString::kHeaderSize - kHeapObjectTag));
  }

  // rax: result string
  // rcx: result length
  // rdx: original value of rsi
  // rdi: first character of result
  // rsi: character of sub string start
  StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, true);
  __ movq(rsi, rdx);  // Restore rsi.
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(kArgumentsSize);

  __ bind(&non_ascii_flat);
  // rax: string
  // rbx: instance type & kStringRepresentationMask | kStringEncodingMask
  // rcx: result string length
  // Check for sequential two byte string.
  __ cmpb(rbx, Immediate(kSeqStringTag | kTwoByteStringTag));
  __ j(not_equal, &runtime);

  // Allocate the result.
  __ AllocateTwoByteString(rax, rcx, rbx, rdx, rdi, &runtime);

  // rax: result string
  // rcx: result string length
  __ movq(rdx, rsi);  // rsi used by following code.
  // Locate first character of result.
  __ lea(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize));
  // Load string argument and locate character of sub string start.
  __ movq(rsi, Operand(rsp, kStringOffset));
  __ movq(rbx, Operand(rsp, kFromOffset));
  {
    SmiIndex smi_as_index = masm->SmiToIndex(rbx, rbx, times_2);
    __ lea(rsi, Operand(rsi, smi_as_index.reg, smi_as_index.scale,
                        SeqAsciiString::kHeaderSize - kHeapObjectTag));
  }

  // rax: result string
  // rcx: result length
  // rdx: original value of rsi
  // rdi: first character of result
  // rsi: character of sub string start
  StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, false);
  __ movq(rsi, rdx);  // Restore rsi.

  __ bind(&return_rax);
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(kArgumentsSize);

  // Just jump to runtime to create the sub string.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kSubString, 3, 1);
}
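
// A C-like sketch of the fast path above:
//   len = to - from;
//   if (len == string->length()) return string;  // Whole string requested.
//   if (len < 2) goto runtime;       // Lengths 0 and 1 handled in runtime.
//   if (len == 2) first probe the two-character symbol table;
//   result = AllocateSeq{Ascii,TwoByte}String(len);
//   copy len characters from string + from into result;  // rep movs helper.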


void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
                                                        Register left,
                                                        Register right,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3,
                                                        Register scratch4) {
  // Ensure that you can always subtract a string length from a non-negative
  // number (e.g. another length).
  STATIC_ASSERT(String::kMaxLength < 0x7fffffff);

  // Find minimum length and length difference.
  __ movq(scratch1, FieldOperand(left, String::kLengthOffset));
  __ movq(scratch4, scratch1);
  __ SmiSub(scratch4,
            scratch4,
            FieldOperand(right, String::kLengthOffset));
  // Register scratch4 now holds left.length - right.length.
  const Register length_difference = scratch4;
  NearLabel left_shorter;
  __ j(less, &left_shorter);
  // The right string isn't longer than the left one.
  // Get the right string's length by subtracting the (non-negative) difference
  // from the left string's length.
  __ SmiSub(scratch1, scratch1, length_difference);
  __ bind(&left_shorter);
  // Register scratch1 now holds Min(left.length, right.length).
  const Register min_length = scratch1;

  NearLabel compare_lengths;
  // If min-length is zero, go directly to comparing lengths.
  __ SmiTest(min_length);
  __ j(zero, &compare_lengths);

  __ SmiToInteger32(min_length, min_length);

  // Registers scratch2 and scratch3 are free.
  NearLabel result_not_equal;
  Label loop;
  {
    // Check characters 0 .. min_length - 1 in a loop.
    // Use scratch3 as loop index, min_length as limit and scratch2
    // for computation.
    const Register index = scratch3;
    __ movl(index, Immediate(0));  // Index into strings.
    __ bind(&loop);
    // Compare characters.
    // TODO(lrn): Could we load more than one character at a time?
    __ movb(scratch2, FieldOperand(left,
                                   index,
                                   times_1,
                                   SeqAsciiString::kHeaderSize));
    // Increment index and use -1 modifier on next load to give
    // the previous load extra time to complete.
    __ addl(index, Immediate(1));
    __ cmpb(scratch2, FieldOperand(right,
                                   index,
                                   times_1,
                                   SeqAsciiString::kHeaderSize - 1));
    __ j(not_equal, &result_not_equal);
    __ cmpl(index, min_length);
    __ j(not_equal, &loop);
  }
  // Completed loop without finding different characters.
  // Compare lengths (precomputed).
  __ bind(&compare_lengths);
  __ SmiTest(length_difference);
  __ j(not_zero, &result_not_equal);

  // Result is EQUAL.
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  NearLabel result_greater;
  __ bind(&result_not_equal);
  // Unequal comparison of left to right, either character or length.
  __ j(greater, &result_greater);

  // Result is LESS.
  __ Move(rax, Smi::FromInt(LESS));
  __ ret(0);

  // Result is GREATER.
  __ bind(&result_greater);
  __ Move(rax, Smi::FromInt(GREATER));
  __ ret(0);
}
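
// Equivalent C for the comparison above (a sketch):
//   int min_len = min(left.length, right.length);
//   for (int i = 0; i < min_len; i++) {
//     if (left[i] != right[i])
//       return left[i] < right[i] ? LESS : GREATER;
//   }
//   int diff = left.length - right.length;
//   return diff == 0 ? EQUAL : (diff < 0 ? LESS : GREATER);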


void StringCompareStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  // rsp[0]: return address
  // rsp[8]: right string
  // rsp[16]: left string

  __ movq(rdx, Operand(rsp, 2 * kPointerSize));  // left
  __ movq(rax, Operand(rsp, 1 * kPointerSize));  // right

  // Check for identity.
  NearLabel not_same;
  __ cmpq(rdx, rax);
  __ j(not_equal, &not_same);
  __ Move(rax, Smi::FromInt(EQUAL));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_compare_native(), 1);
  __ ret(2 * kPointerSize);

  __ bind(&not_same);

  // Check that both are sequential ASCII strings.
  __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime);

  // Inline comparison of ascii strings.
  __ IncrementCounter(counters->string_compare_native(), 1);
  // Drop arguments from the stack.
  __ pop(rcx);
  __ addq(rsp, Immediate(2 * kPointerSize));
  __ push(rcx);
  GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8);

  // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
}


void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::SMIS);
  NearLabel miss;
  __ JumpIfNotBothSmi(rdx, rax, &miss);

  if (GetCondition() == equal) {
    // For equality we do not care about the sign of the result.
    __ subq(rax, rdx);
  } else {
    NearLabel done;
    __ subq(rdx, rax);
    __ j(no_overflow, &done);
    // Correct sign of result in case of overflow.
    __ SmiNot(rdx, rdx);
    __ bind(&done);
    __ movq(rax, rdx);
  }
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}
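
// A sketch of the ordered compare above: the stub returns left - right,
// whose sign orders the two smis. If the subtraction overflows, the sign
// bit ends up inverted, so flipping all bits (SmiNot) restores a result
// with the correct sign.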


void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::HEAP_NUMBERS);

  NearLabel generic_stub;
  NearLabel unordered;
  NearLabel miss;
  Condition either_smi = masm->CheckEitherSmi(rax, rdx);
  __ j(either_smi, &generic_stub);

  __ CmpObjectType(rax, HEAP_NUMBER_TYPE, rcx);
  __ j(not_equal, &miss);
  __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
  __ j(not_equal, &miss);

  // Load left and right operands.
  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));

  // Compare operands.
  __ ucomisd(xmm0, xmm1);

  // Don't base result on EFLAGS when a NaN is involved.
  __ j(parity_even, &unordered);

  // Return a result of -1, 0, or 1, based on EFLAGS.
  // Performing mov, because xor would destroy the flag register.
  __ movl(rax, Immediate(0));
  __ movl(rcx, Immediate(0));
  __ setcc(above, rax);  // Add one to zero if carry clear and not equal.
  __ sbbq(rax, rcx);  // Subtract one if below (aka. carry set).
  __ ret(0);

  __ bind(&unordered);

  CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
  __ bind(&generic_stub);
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);

  __ bind(&miss);
  GenerateMiss(masm);
}
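
// The setcc/sbbq pair above materializes -1/0/1 without branches (a
// sketch, with EFLAGS set by ucomisd(left, right)):
//   rax = (left > right) ? 1 : 0;   // setcc(above).
//   rax -= (left < right) ? 1 : 0;  // sbbq subtracts the carry flag.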


void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::OBJECTS);
  NearLabel miss;
  Condition either_smi = masm->CheckEitherSmi(rdx, rax);
  __ j(either_smi, &miss);

  __ CmpObjectType(rax, JS_OBJECT_TYPE, rcx);
  __ j(not_equal, &miss, not_taken);
  __ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx);
  __ j(not_equal, &miss, not_taken);

  ASSERT(GetCondition() == equal);
  __ subq(rax, rdx);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
  // Save the registers.
  __ pop(rcx);
  __ push(rdx);
  __ push(rax);
  __ push(rcx);

  // Call the runtime system in a fresh internal frame.
  ExternalReference miss =
      ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
  __ EnterInternalFrame();
  __ push(rdx);
  __ push(rax);
  __ Push(Smi::FromInt(op_));
  __ CallExternalReference(miss, 3);
  __ LeaveInternalFrame();

  // Compute the entry point of the rewritten stub.
  __ lea(rdi, FieldOperand(rax, Code::kHeaderSize));

  // Restore registers.
  __ pop(rcx);
  __ pop(rax);
  __ pop(rdx);
  __ push(rcx);

  // Do a tail call to the rewritten stub.
  __ jmp(rdi);
}
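
// The miss protocol, as a sketch: the stub preserves its operands, calls
// CompareIC_Miss (which selects and returns a code object specialized for
// the operand types just seen), and then tail-calls that code so the
// current comparison is re-executed by the new stub.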


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64