// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_X64

#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"
#include "src/isolate.h"
#include "src/jsregexp.h"
#include "src/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {


static void InitializeArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(rax, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE, PASS_ARGUMENTS);
  }
}


static void InitializeInternalArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kInternalArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(rax, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE, PASS_ARGUMENTS);
  }
}


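// Note on the helpers above: a constant_stack_parameter_count of 0 means the
// stub takes no arguments on the stack, so no count register is registered;
// any other value (including -1, used by the N-argument stubs below when the
// count is only known at run time) makes the actual argument count travel to
// the deopt handler in rax (PASS_ARGUMENTS).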
void ArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 0);
}


void ArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void ArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, -1);
}


void InternalArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 0);
}


void InternalArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void InternalArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, -1);
}


#define __ ACCESS_MASM(masm)


void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
                                               ExternalReference miss) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
  int param_count = descriptor.GetEnvironmentParameterCount();
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    DCHECK(param_count == 0 ||
           rax.is(descriptor.GetEnvironmentParameterRegister(param_count - 1)));
    // Push arguments
    for (int i = 0; i < param_count; ++i) {
      __ Push(descriptor.GetEnvironmentParameterRegister(i));
    }
    __ CallExternalReference(miss, param_count);
  }

  __ Ret();
}


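// All caller-saved registers (plus the XMM registers when save_doubles() is
// set) are preserved across the C call below, since the generated code that
// enters this stub expects them to survive.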
void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  __ PushCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs);
  const int argument_count = 1;
  __ PrepareCallCFunction(argument_count);
  __ LoadAddress(arg_reg_1,
                 ExternalReference::isolate_address(isolate()));

  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  __ PopCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs);
  __ ret(0);
}


class FloatingPointHelper : public AllStatic {
 public:
  enum ConvertUndefined {
    CONVERT_UNDEFINED_TO_ZERO,
    BAILOUT_ON_UNDEFINED
  };
  // Load the operands from rdx and rax into xmm0 and xmm1, as doubles.
  // If the operands are not both numbers, jump to not_numbers.
  // Leaves rdx and rax unchanged.
  static void LoadSSE2UnknownOperands(MacroAssembler* masm,
                                      Label* not_numbers);
};


void DoubleToIStub::Generate(MacroAssembler* masm) {
  Register input_reg = this->source();
  Register final_result_reg = this->destination();
  DCHECK(is_truncating());

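  // Truncation works on the raw IEEE-754 bits rather than on FP compares:
  // when the unbiased exponent is at least kMantissaBits (52) the value is
  // an integer, and the low 32 bits of the result are, in effect,
  //   (uint32_t)mantissa_low32 << (unbiased_exponent - 52)
  // with shifts of 32 or more yielding 0. Smaller exponents mean
  // |value| < 2^53, which fits in an int64, so cvttsd2siq handles them.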
  Label check_negative, process_64_bits, done;

  int double_offset = offset();

  // Account for return address and saved regs if input is rsp.
  if (input_reg.is(rsp)) double_offset += 3 * kRegisterSize;

  MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
  MemOperand exponent_operand(MemOperand(input_reg,
                                         double_offset + kDoubleSize / 2));

  Register scratch1;
  Register scratch_candidates[3] = { rbx, rdx, rdi };
  for (int i = 0; i < 3; i++) {
    scratch1 = scratch_candidates[i];
    if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
  }

  // Since we must use rcx for shifts below, use some other register (rax)
  // to calculate the result if rcx is the requested return register.
  Register result_reg = final_result_reg.is(rcx) ? rax : final_result_reg;
  // Save rcx if it isn't the return register and therefore volatile, or if it
  // is the return register, then save the temp register we use in its stead
  // for the result.
  Register save_reg = final_result_reg.is(rcx) ? rax : rcx;
  __ pushq(scratch1);
  __ pushq(save_reg);

  bool stash_exponent_copy = !input_reg.is(rsp);
  __ movl(scratch1, mantissa_operand);
  __ movsd(xmm0, mantissa_operand);
  __ movl(rcx, exponent_operand);
  if (stash_exponent_copy) __ pushq(rcx);

  __ andl(rcx, Immediate(HeapNumber::kExponentMask));
  __ shrl(rcx, Immediate(HeapNumber::kExponentShift));
  __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias));
  __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits);

  // Result is entirely in the lower 32 bits of the mantissa.
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  __ subl(rcx, Immediate(delta));
  __ xorl(result_reg, result_reg);
  __ cmpl(rcx, Immediate(31));
  __ j(above, &done);
  __ shll_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  __ cvttsd2siq(result_reg, xmm0);
  __ jmp(&done, Label::kNear);

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ movl(result_reg, scratch1);
  __ negl(result_reg);
  if (stash_exponent_copy) {
    __ cmpl(MemOperand(rsp, 0), Immediate(0));
  } else {
    __ cmpl(exponent_operand, Immediate(0));
  }
  __ cmovl(greater, result_reg, scratch1);

  // Restore registers
  __ bind(&done);
  if (stash_exponent_copy) {
    __ addp(rsp, Immediate(kDoubleSize));
  }
  if (!final_result_reg.is(result_reg)) {
    DCHECK(final_result_reg.is(rcx));
    __ movl(final_result_reg, result_reg);
  }
  __ popq(save_reg);
  __ popq(scratch1);
  __ ret(0);
}


void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm,
                                                  Label* not_numbers) {
  Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done;
  // Load operand in rdx into xmm0, or branch to not_numbers.
  __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex);
  __ JumpIfSmi(rdx, &load_smi_rdx);
  __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);  // Argument in rdx is not a number.
  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  // Load operand in rax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(rax, &load_smi_rax);

  __ bind(&load_nonsmi_rax);
  __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);
  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi_rdx);
  __ SmiToInteger32(kScratchRegister, rdx);
  __ Cvtlsi2sd(xmm0, kScratchRegister);
  __ JumpIfNotSmi(rax, &load_nonsmi_rax);

  __ bind(&load_smi_rax);
  __ SmiToInteger32(kScratchRegister, rax);
  __ Cvtlsi2sd(xmm1, kScratchRegister);
  __ bind(&done);
}


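// MathPowStub computes base^exponent. In outline: tagged inputs are unpacked
// according to exponent_type(); integer exponents go through the
// square-and-multiply loop at int_exponent; for ON_STACK calls the exponents
// +0.5 and -0.5 get dedicated sqrt/rsqrt paths (including the
// Math.pow(-Infinity, +/-0.5) corner cases from ES5 15.8.2.13); all other
// double exponents use the x87 F2XM1/FSCALE sequence, bailing out to the
// runtime or the power_double_double C function on failure.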
void MathPowStub::Generate(MacroAssembler* masm) {
  const Register exponent = MathPowTaggedDescriptor::exponent();
  DCHECK(exponent.is(rdx));
  const Register base = rax;
  const Register scratch = rcx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ movp(scratch, Immediate(1));
  __ Cvtlsi2sd(double_result, scratch);

  if (exponent_type() == ON_STACK) {
    Label base_is_smi, unpack_exponent;
    // The exponent and base are supplied as arguments on the stack.
    // This can only happen if the stub is called from non-optimized code.
    // Load input parameters from stack.
    StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movp(base, args.GetArgumentOperand(0));
    __ movp(exponent, args.GetArgumentOperand(1));
    __ JumpIfSmi(base, &base_is_smi, Label::kNear);
    __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &call_runtime);

    __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset));
    __ jmp(&unpack_exponent, Label::kNear);

    __ bind(&base_is_smi);
    __ SmiToInteger32(base, base);
    __ Cvtlsi2sd(double_base, base);
    __ bind(&unpack_exponent);

    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiToInteger32(exponent, exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ CompareRoot(FieldOperand(exponent, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &call_runtime);
    __ movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
  } else if (exponent_type() == TAGGED) {
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiToInteger32(exponent, exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type() != INTEGER) {
    Label fast_power, try_arithmetic_simplification;
    // Detect integer exponents stored as double.
    __ DoubleToI(exponent, double_exponent, double_scratch,
                 TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification,
                 &try_arithmetic_simplification,
                 &try_arithmetic_simplification);
    __ jmp(&int_exponent);

    __ bind(&try_arithmetic_simplification);
    __ cvttsd2si(exponent, double_exponent);
    // Skip to runtime if possibly NaN (indicated by the indefinite integer).
    __ cmpl(exponent, Immediate(0x1));
    __ j(overflow, &call_runtime);

    if (exponent_type() == ON_STACK) {
      // Detect square root case. Crankshaft detects constant +/-0.5 at
      // compile time and uses DoMathPowHalf instead. We then skip this check
      // for non-constant cases of +/-0.5 as these hardly occur.
      Label continue_sqrt, continue_rsqrt, not_plus_half;
      // Test for 0.5.
      // Load double_scratch with 0.5.
      __ movq(scratch, V8_UINT64_C(0x3FE0000000000000));
      __ movq(double_scratch, scratch);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &not_plus_half, Label::kNear);

      // Calculates square root of base. Check for the special case of
      // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
      // According to IEEE-754, double-precision -Infinity has the highest
      // 12 bits set and the lowest 52 bits cleared.
      __ movq(scratch, V8_UINT64_C(0xFFF0000000000000));
      __ movq(double_scratch, scratch);
      __ ucomisd(double_scratch, double_base);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal. However, it also sets the carry flag.
      __ j(not_equal, &continue_sqrt, Label::kNear);
      __ j(carry, &continue_sqrt, Label::kNear);

      // Set result to Infinity in the special case.
      __ xorps(double_result, double_result);
      __ subsd(double_result, double_scratch);
      __ jmp(&done);

      __ bind(&continue_sqrt);
      // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
      __ xorps(double_scratch, double_scratch);
      __ addsd(double_scratch, double_base);  // Convert -0 to 0.
      __ sqrtsd(double_result, double_scratch);
      __ jmp(&done);

      // Test for -0.5.
      __ bind(&not_plus_half);
      // Load double_scratch with -0.5 by subtracting 1.
      __ subsd(double_scratch, double_result);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &fast_power, Label::kNear);

      // Calculates reciprocal of square root of base. Check for the special
      // case of Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
      // According to IEEE-754, double-precision -Infinity has the highest
      // 12 bits set and the lowest 52 bits cleared.
      __ movq(scratch, V8_UINT64_C(0xFFF0000000000000));
      __ movq(double_scratch, scratch);
      __ ucomisd(double_scratch, double_base);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal. However, it also sets the carry flag.
      __ j(not_equal, &continue_rsqrt, Label::kNear);
      __ j(carry, &continue_rsqrt, Label::kNear);

      // Set result to 0 in the special case.
      __ xorps(double_result, double_result);
      __ jmp(&done);

      __ bind(&continue_rsqrt);
      // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
      __ xorps(double_exponent, double_exponent);
      __ addsd(double_exponent, double_base);  // Convert -0 to +0.
      __ sqrtsd(double_exponent, double_exponent);
      __ divsd(double_result, double_exponent);
      __ jmp(&done);
    }

    // Using FPU instructions to calculate power.
    Label fast_power_failed;
    __ bind(&fast_power);
    __ fnclex();  // Clear flags to catch exceptions later.
    // Transfer (B)ase and (E)xponent onto the FPU register stack.
    __ subp(rsp, Immediate(kDoubleSize));
    __ movsd(Operand(rsp, 0), double_exponent);
    __ fld_d(Operand(rsp, 0));  // E
    __ movsd(Operand(rsp, 0), double_base);
    __ fld_d(Operand(rsp, 0));  // B, E

    // Exponent is in st(1) and base is in st(0)
    // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
    // FYL2X calculates st(1) * log2(st(0))
    __ fyl2x();    // X
    __ fld(0);     // X, X
    __ frndint();  // rnd(X), X
    __ fsub(1);    // rnd(X), X-rnd(X)
    __ fxch(1);    // X - rnd(X), rnd(X)
    // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
    __ f2xm1();    // 2^(X-rnd(X)) - 1, rnd(X)
    __ fld1();     // 1, 2^(X-rnd(X)) - 1, rnd(X)
    __ faddp(1);   // 2^(X-rnd(X)), rnd(X)
    // FSCALE calculates st(0) * 2^st(1)
    __ fscale();   // 2^X, rnd(X)
    __ fstp(1);
    // Bail out to runtime in case of exceptions in the status word.
    __ fnstsw_ax();
    __ testb(rax, Immediate(0x5F));  // Check for all but precision exception.
    __ j(not_zero, &fast_power_failed, Label::kNear);
    __ fstp_d(Operand(rsp, 0));
    __ movsd(double_result, Operand(rsp, 0));
    __ addp(rsp, Immediate(kDoubleSize));
    __ jmp(&done);

    __ bind(&fast_power_failed);
    __ fninit();
    __ addp(rsp, Immediate(kDoubleSize));
    __ jmp(&call_runtime);
  }

  // Calculate power with integer exponent.
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  // Back up exponent as we need to check if exponent is negative later.
  __ movp(scratch, exponent);  // Back up exponent.
  __ movsd(double_scratch, double_base);  // Back up base.
  __ movsd(double_scratch2, double_result);  // Load double_exponent with 1.

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ testl(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ negl(scratch);
  __ bind(&no_neg);

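  // The loop below is binary exponentiation (square-and-multiply):
  // double_scratch is squared once per shift of the exponent and folded
  // into double_result whenever the shifted-out bit was 1. Equivalent C:
  //   while (e != 0) { if (e & 1) r *= b; b *= b; e >>= 1; }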
  __ j(zero, &while_false, Label::kNear);
  __ shrl(scratch, Immediate(1));
  // Above condition means CF==0 && ZF==0. This means that the
  // bit that has been shifted out is 0 and the result is not 0.
  __ j(above, &while_true, Label::kNear);
  __ movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ shrl(scratch, Immediate(1));
  __ mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);

  __ bind(&while_false);
  // If the exponent is negative, return 1/result.
  __ testl(exponent, exponent);
  __ j(greater, &done);
  __ divsd(double_scratch2, double_result);
  __ movsd(double_result, double_scratch2);
  // Test whether result is zero. Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ xorps(double_scratch2, double_scratch2);
  __ ucomisd(double_scratch2, double_result);
  // double_exponent aliased as double_scratch2 has already been overwritten
  // and may not have contained the exponent value in the first place when the
  // input was a smi. We reset it with exponent value before bailing out.
  __ j(not_equal, &done);
  __ Cvtlsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  Counters* counters = isolate()->counters();
  if (exponent_type() == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMathPowRT, 2, 1);

    // The stub is called from non-optimized code, which expects the result
    // as heap number in rax.
    __ bind(&done);
    __ AllocateHeapNumber(rax, rcx, &call_runtime);
    __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), double_result);
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(2 * kPointerSize);
  } else {
    __ bind(&call_runtime);
    // Move base to the correct argument register. Exponent is already in xmm1.
    __ movsd(xmm0, double_base);
    DCHECK(double_exponent.is(xmm1));
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(2);
      __ CallCFunction(
          ExternalReference::power_double_double_function(isolate()), 2);
    }
    // Return value is in xmm0.
    __ movsd(double_result, xmm0);

    __ bind(&done);
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(0);
  }
}


void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  Label miss;
  Register receiver = LoadDescriptor::ReceiverRegister();
  // Ensure that the vector and slot registers won't be clobbered before
  // calling the miss handler.
  DCHECK(!FLAG_vector_ics ||
         !AreAliased(r8, r9, VectorLoadICDescriptor::VectorRegister(),
                     VectorLoadICDescriptor::SlotRegister()));

  NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, r8,
                                                          r9, &miss);
  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
}


void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  // The key is in rdx and the parameter count is in rax.
  DCHECK(rdx.is(ArgumentsAccessReadDescriptor::index()));
  DCHECK(rax.is(ArgumentsAccessReadDescriptor::parameter_count()));

  // Check that the key is a smi.
  Label slow;
  __ JumpIfNotSmi(rdx, &slow);

  // Check if the calling frame is an arguments adaptor frame. We look at the
  // context offset, and if the frame is not a regular one, then we find a
  // Smi instead of the context. We can't use SmiCompare here, because that
  // only works for comparing two smis.
  Label adaptor;
  __ movp(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor);

  // Check index against formal parameters count limit passed in
  // through register rax. Use unsigned comparison to get negative
  // check for free.
  __ cmpp(rdx, rax);
  __ j(above_equal, &slow);

  // Read the argument from the stack and return it.
  __ SmiSub(rax, rax, rdx);
  __ SmiToInteger32(rax, rax);
  StackArgumentsAccessor args(rbp, rax, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rax, args.GetArgumentOperand(0));
  __ Ret();

  // Arguments adaptor case: Check index against actual arguments
  // limit found in the arguments adaptor frame. Use unsigned
  // comparison to get negative check for free.
  __ bind(&adaptor);
  __ movp(rcx, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ cmpp(rdx, rcx);
  __ j(above_equal, &slow);

  // Read the argument from the stack and return it.
  __ SmiSub(rcx, rcx, rdx);
  __ SmiToInteger32(rcx, rcx);
  StackArgumentsAccessor adaptor_args(rbx, rcx,
                                      ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rax, adaptor_args.GetArgumentOperand(0));
  __ Ret();

  // Slow-case: Handle non-smi or out-of-bounds access to arguments
  // by calling the runtime system.
  __ bind(&slow);
  __ PopReturnAddressTo(rbx);
  __ Push(rdx);
  __ PushReturnAddressFrom(rbx);
  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
}


void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
  // Stack layout:
  //  rsp[0]  : return address
  //  rsp[8]  : number of parameters (tagged)
  //  rsp[16] : receiver displacement
  //  rsp[24] : function
  // Registers used over the whole function:
  //  rbx: the mapped parameter count (untagged)
  //  rax: the allocated object (tagged).

  Factory* factory = isolate()->factory();

  StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ SmiToInteger64(rbx, args.GetArgumentOperand(2));
  // rbx = parameter count (untagged)

  // Check if the calling frame is an arguments adaptor frame.
  Label runtime;
  Label adaptor_frame, try_allocate;
  __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ movp(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
  __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor_frame);

  // No adaptor, parameter count = argument count.
  __ movp(rcx, rbx);
  __ jmp(&try_allocate, Label::kNear);

  // We have an adaptor frame. Patch the parameters pointer.
  __ bind(&adaptor_frame);
  __ SmiToInteger64(rcx,
                    Operand(rdx,
                            ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ leap(rdx, Operand(rdx, rcx, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));
  __ movp(args.GetArgumentOperand(1), rdx);

  // rbx = parameter count (untagged)
  // rcx = argument count (untagged)
  // Compute the mapped parameter count = min(rbx, rcx) in rbx.
  __ cmpp(rbx, rcx);
  __ j(less_equal, &try_allocate, Label::kNear);
  __ movp(rbx, rcx);

  __ bind(&try_allocate);

  // Compute the sizes of backing store, parameter map, and arguments object.
  // 1. Parameter map, has 2 extra words containing context and backing store.
  const int kParameterMapHeaderSize =
      FixedArray::kHeaderSize + 2 * kPointerSize;
  Label no_parameter_map;
  __ xorp(r8, r8);
  __ testp(rbx, rbx);
  __ j(zero, &no_parameter_map, Label::kNear);
  __ leap(r8, Operand(rbx, times_pointer_size, kParameterMapHeaderSize));
  __ bind(&no_parameter_map);

  // 2. Backing store.
  __ leap(r8, Operand(r8, rcx, times_pointer_size, FixedArray::kHeaderSize));

  // 3. Arguments object.
  __ addp(r8, Immediate(Heap::kSloppyArgumentsObjectSize));

  // Do the allocation of all three objects in one go.
  __ Allocate(r8, rax, rdx, rdi, &runtime, TAG_OBJECT);

  // rax = address of new object(s) (tagged)
  // rcx = argument count (untagged)
  // Get the arguments map from the current native context into rdi.
  Label has_mapped_parameters, instantiate;
  __ movp(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ movp(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
  __ testp(rbx, rbx);
  __ j(not_zero, &has_mapped_parameters, Label::kNear);

  const int kIndex = Context::SLOPPY_ARGUMENTS_MAP_INDEX;
  __ movp(rdi, Operand(rdi, Context::SlotOffset(kIndex)));
  __ jmp(&instantiate, Label::kNear);

  const int kAliasedIndex = Context::ALIASED_ARGUMENTS_MAP_INDEX;
  __ bind(&has_mapped_parameters);
  __ movp(rdi, Operand(rdi, Context::SlotOffset(kAliasedIndex)));
  __ bind(&instantiate);

  // rax = address of new object (tagged)
  // rbx = mapped parameter count (untagged)
  // rcx = argument count (untagged)
  // rdi = address of arguments map (tagged)
  __ movp(FieldOperand(rax, JSObject::kMapOffset), rdi);
  __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister);
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), kScratchRegister);

  // Set up the callee in-object property.
  STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
  __ movp(rdx, args.GetArgumentOperand(0));
  __ AssertNotSmi(rdx);
  __ movp(FieldOperand(rax, JSObject::kHeaderSize +
                            Heap::kArgumentsCalleeIndex * kPointerSize),
          rdx);

  // Use the length (smi tagged) and set that as an in-object property too.
  // Note: rcx is tagged from here on.
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ Integer32ToSmi(rcx, rcx);
  __ movp(FieldOperand(rax, JSObject::kHeaderSize +
                            Heap::kArgumentsLengthIndex * kPointerSize),
          rcx);

  // Set up the elements pointer in the allocated arguments object.
  // If we allocated a parameter map, rdi will point there, otherwise to the
  // backing store.
  __ leap(rdi, Operand(rax, Heap::kSloppyArgumentsObjectSize));
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rdi);

  // rax = address of new object (tagged)
  // rbx = mapped parameter count (untagged)
  // rcx = argument count (tagged)
  // rdi = address of parameter map or backing store (tagged)

  // Initialize parameter map. If there are no mapped arguments, we're done.
  Label skip_parameter_map;
  __ testp(rbx, rbx);
  __ j(zero, &skip_parameter_map);

  __ LoadRoot(kScratchRegister, Heap::kSloppyArgumentsElementsMapRootIndex);
  // rbx contains the untagged argument count. Add 2 and tag to write.
  __ movp(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
  __ Integer64PlusConstantToSmi(r9, rbx, 2);
  __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), r9);
  __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 0 * kPointerSize), rsi);
  __ leap(r9, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
  __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 1 * kPointerSize), r9);

734 // We need to fill in mapped_parameter_count slots. They index the context,
735 // where parameters are stored in reverse order, at
736 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
737 // The mapped parameter thus need to get indices
738 // MIN_CONTEXT_SLOTS+parameter_count-1 ..
739 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
740 // We loop from right to left.
741 Label parameters_loop, parameters_test;
742
743 // Load tagged parameter count into r9.
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100744 __ Integer32ToSmi(r9, rbx);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000745 __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000746 __ addp(r8, args.GetArgumentOperand(2));
747 __ subp(r8, r9);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000748 __ Move(r11, factory->the_hole_value());
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000749 __ movp(rdx, rdi);
750 __ leap(rdi, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000751 // r9 = loop variable (tagged)
752 // r8 = mapping index (tagged)
753 // r11 = the hole value
754 // rdx = address of parameter map (tagged)
755 // rdi = address of backing store (tagged)
756 __ jmp(&parameters_test, Label::kNear);
757
758 __ bind(&parameters_loop);
759 __ SmiSubConstant(r9, r9, Smi::FromInt(1));
760 __ SmiToInteger64(kScratchRegister, r9);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000761 __ movp(FieldOperand(rdx, kScratchRegister,
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000762 times_pointer_size,
763 kParameterMapHeaderSize),
764 r8);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000765 __ movp(FieldOperand(rdi, kScratchRegister,
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000766 times_pointer_size,
767 FixedArray::kHeaderSize),
768 r11);
769 __ SmiAddConstant(r8, r8, Smi::FromInt(1));
770 __ bind(&parameters_test);
771 __ SmiTest(r9);
772 __ j(not_zero, &parameters_loop, Label::kNear);
773
774 __ bind(&skip_parameter_map);
775
776 // rcx = argument count (tagged)
777 // rdi = address of backing store (tagged)
778 // Copy arguments header and remaining slots (if there are any).
779 __ Move(FieldOperand(rdi, FixedArray::kMapOffset),
780 factory->fixed_array_map());
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000781 __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000782
783 Label arguments_loop, arguments_test;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000784 __ movp(r8, rbx);
785 __ movp(rdx, args.GetArgumentOperand(1));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100786 // Untag rcx for the loop below.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000787 __ SmiToInteger64(rcx, rcx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000788 __ leap(kScratchRegister, Operand(r8, times_pointer_size, 0));
789 __ subp(rdx, kScratchRegister);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000790 __ jmp(&arguments_test, Label::kNear);
791
792 __ bind(&arguments_loop);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000793 __ subp(rdx, Immediate(kPointerSize));
794 __ movp(r9, Operand(rdx, 0));
795 __ movp(FieldOperand(rdi, r8,
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000796 times_pointer_size,
797 FixedArray::kHeaderSize),
798 r9);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000799 __ addp(r8, Immediate(1));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000800
801 __ bind(&arguments_test);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000802 __ cmpp(r8, rcx);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000803 __ j(less, &arguments_loop, Label::kNear);
804
805 // Return and remove the on-stack parameters.
806 __ ret(3 * kPointerSize);
807
808 // Do the runtime call to allocate the arguments object.
809 // rcx = argument count (untagged)
810 __ bind(&runtime);
811 __ Integer32ToSmi(rcx, rcx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000812 __ movp(args.GetArgumentOperand(2), rcx); // Patch argument count.
813 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000814}
815
816
void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
  // rsp[0]  : return address
  // rsp[8]  : number of parameters
  // rsp[16] : receiver displacement
  // rsp[24] : function

  // Check if the calling frame is an arguments adaptor frame.
  Label runtime;
  __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ movp(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
  __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(not_equal, &runtime);

  // Patch the arguments.length and the parameters pointer.
  StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ movp(args.GetArgumentOperand(2), rcx);
  __ SmiToInteger64(rcx, rcx);
  __ leap(rdx, Operand(rdx, rcx, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));
  __ movp(args.GetArgumentOperand(1), rdx);

  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
}


void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
  // Return address is on the stack.
  Label slow;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register key = LoadDescriptor::NameRegister();
  Register scratch = rax;
  DCHECK(!scratch.is(receiver) && !scratch.is(key));

  // Check that the key is an array index, that is Uint32.
  STATIC_ASSERT(kSmiValueSize <= 32);
  __ JumpUnlessNonNegativeSmi(key, &slow);

  // Everything is fine, call runtime.
  __ PopReturnAddressTo(scratch);
  __ Push(receiver);  // receiver
  __ Push(key);       // key
  __ PushReturnAddressFrom(scratch);

  // Perform tail call to the entry.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kLoadElementWithInterceptor),
                        masm->isolate()),
      2, 1);

  __ bind(&slow);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}


void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
  // Return address is on the stack.
  Label miss;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register index = LoadDescriptor::NameRegister();
  Register scratch = rdi;
  Register result = rax;
  DCHECK(!scratch.is(receiver) && !scratch.is(index));
  DCHECK(!FLAG_vector_ics ||
         (!scratch.is(VectorLoadICDescriptor::VectorRegister()) &&
          result.is(VectorLoadICDescriptor::SlotRegister())));

  // StringCharAtGenerator doesn't use the result register until it's passed
  // the different miss possibilities. If it did, we would have a conflict
  // when FLAG_vector_ics is true.
  StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX,
                                          RECEIVER_IS_STRING);
  char_at_generator.GenerateFast(masm);
  __ ret(0);

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, call_helper);

  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}


void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
  // rsp[0]  : return address
  // rsp[8]  : number of parameters
  // rsp[16] : receiver displacement
  // rsp[24] : function

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ movp(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
  __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor_frame);

  // Get the length from the frame.
  StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rcx, args.GetArgumentOperand(2));
  __ SmiToInteger64(rcx, rcx);
  __ jmp(&try_allocate);

  // Patch the arguments.length and the parameters pointer.
  __ bind(&adaptor_frame);
  __ movp(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ movp(args.GetArgumentOperand(2), rcx);
  __ SmiToInteger64(rcx, rcx);
  __ leap(rdx, Operand(rdx, rcx, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));
  __ movp(args.GetArgumentOperand(1), rdx);

  // Try the new space allocation. Start out with computing the size of
  // the arguments object and the elements array.
  Label add_arguments_object;
  __ bind(&try_allocate);
  __ testp(rcx, rcx);
  __ j(zero, &add_arguments_object, Label::kNear);
  __ leap(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize));
  __ bind(&add_arguments_object);
  __ addp(rcx, Immediate(Heap::kStrictArgumentsObjectSize));

  // Do the allocation of both objects in one go.
  __ Allocate(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT);

  // Get the arguments map from the current native context.
  __ movp(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ movp(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
  const int offset = Context::SlotOffset(Context::STRICT_ARGUMENTS_MAP_INDEX);
  __ movp(rdi, Operand(rdi, offset));

  __ movp(FieldOperand(rax, JSObject::kMapOffset), rdi);
  __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister);
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), kScratchRegister);

  // Get the length (smi tagged) and set that as an in-object property too.
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ movp(rcx, args.GetArgumentOperand(2));
  __ movp(FieldOperand(rax, JSObject::kHeaderSize +
                            Heap::kArgumentsLengthIndex * kPointerSize),
          rcx);

  // If there are no actual arguments, we're done.
  Label done;
  __ testp(rcx, rcx);
  __ j(zero, &done);

  // Get the parameters pointer from the stack.
  __ movp(rdx, args.GetArgumentOperand(1));

  // Set up the elements pointer in the allocated arguments object and
  // initialize the header in the elements fixed array.
  __ leap(rdi, Operand(rax, Heap::kStrictArgumentsObjectSize));
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rdi);
  __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
  __ movp(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);

  __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
  // Untag the length for the loop below.
  __ SmiToInteger64(rcx, rcx);

  // Copy the fixed array slots.
  Label loop;
  __ bind(&loop);
  __ movp(rbx, Operand(rdx, -1 * kPointerSize));  // Skip receiver.
  __ movp(FieldOperand(rdi, FixedArray::kHeaderSize), rbx);
  __ addp(rdi, Immediate(kPointerSize));
  __ subp(rdx, Immediate(kPointerSize));
  __ decp(rcx);
  __ j(not_zero, &loop);

  // Return and remove the on-stack parameters.
  __ bind(&done);
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewStrictArguments, 3, 1);
}


void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Jump straight to the runtime if native RegExp support was not selected
  // at compile time, or if the regexp entry in generated code has been
  // turned off, either by the runtime switch or during compilation.
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExecRT, 4, 1);
#else  // V8_INTERPRETED_REGEXP

  // Stack frame on entry.
  //  rsp[0]  : return address
  //  rsp[8]  : last_match_info (expected JSArray)
  //  rsp[16] : previous index
  //  rsp[24] : subject string
  //  rsp[32] : JSRegExp object

  enum RegExpExecStubArgumentIndices {
    JS_REG_EXP_OBJECT_ARGUMENT_INDEX,
    SUBJECT_STRING_ARGUMENT_INDEX,
    PREVIOUS_INDEX_ARGUMENT_INDEX,
    LAST_MATCH_INFO_ARGUMENT_INDEX,
    REG_EXP_EXEC_ARGUMENT_COUNT
  };

  StackArgumentsAccessor args(rsp, REG_EXP_EXEC_ARGUMENT_COUNT,
                              ARGUMENTS_DONT_CONTAIN_RECEIVER);
  Label runtime;
  // Ensure that a RegExp stack is allocated.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(isolate());
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(isolate());
  __ Load(kScratchRegister, address_of_regexp_stack_memory_size);
  __ testp(kScratchRegister, kScratchRegister);
  __ j(zero, &runtime);

  // Check that the first argument is a JSRegExp object.
  __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
  __ JumpIfSmi(rax, &runtime);
  __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister);
  __ j(not_equal, &runtime);

  // Check that the RegExp has been compiled (data contains a fixed array).
  __ movp(rax, FieldOperand(rax, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    Condition is_smi = masm->CheckSmi(rax);
    __ Check(NegateCondition(is_smi),
             kUnexpectedTypeForRegExpDataFixedArrayExpected);
    __ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister);
    __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
  }

  // rax: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
  __ SmiToInteger32(rbx, FieldOperand(rax, JSRegExp::kDataTagOffset));
  __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP));
  __ j(not_equal, &runtime);

  // rax: RegExp data (FixedArray)
  // Check that the number of captures fit in the static offsets vector buffer.
  __ SmiToInteger32(rdx,
                    FieldOperand(rax, JSRegExp::kIrregexpCaptureCountOffset));
  // Check (number_of_captures + 1) * 2 <= offsets vector size
  // Or     number_of_captures <= offsets vector size / 2 - 1
  STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
  __ cmpl(rdx, Immediate(Isolate::kJSRegexpStaticOffsetsVectorSize / 2 - 1));
  __ j(above, &runtime);

  // Reset offset for possibly sliced string.
  __ Set(r14, 0);
  __ movp(rdi, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
  __ JumpIfSmi(rdi, &runtime);
  __ movp(r15, rdi);  // Make a copy of the original subject string.
  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
  // rax: RegExp data (FixedArray)
  // rdi: subject string
  // r15: subject string
  // Handle subject string according to its encoding and representation:
  // (1) Sequential two byte?  If yes, go to (9).
  // (2) Sequential one byte?  If yes, go to (6).
  // (3) Anything but sequential or cons?  If yes, go to (7).
  // (4) Cons string.  If the string is flat, replace subject with first string.
  //     Otherwise bailout.
  // (5a) Is subject sequential two byte?  If yes, go to (9).
  // (5b) Is subject external?  If yes, go to (8).
  // (6) One byte sequential.  Load regexp code for one byte.
  // (E) Carry on.
  /// [...]

  // Deferred code at the end of the stub:
  // (7) Not a long external string?  If yes, go to (10).
  // (8) External string.  Make it, offset-wise, look like a sequential string.
  // (8a) Is the external string one byte?  If yes, go to (6).
1101 // (9) Two byte sequential. Load regexp code for one byte. Go to (E).
1102 // (10) Short external string or not a string? If yes, bail out to runtime.
1103 // (11) Sliced string. Replace subject with parent. Go to (5a).
1104
1105 Label seq_one_byte_string /* 6 */, seq_two_byte_string /* 9 */,
1106 external_string /* 8 */, check_underlying /* 5a */,
1107 not_seq_nor_cons /* 7 */, check_code /* E */,
1108 not_long_external /* 10 */;
1109
  // (1) Sequential two byte? If yes, go to (9).
  __ andb(rbx, Immediate(kIsNotStringMask |
                         kStringRepresentationMask |
                         kStringEncodingMask |
                         kShortExternalStringMask));
  STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);  // Go to (9).

  // (2) Sequential one byte? If yes, go to (6).
  // Any other sequential string must be one byte.
  __ andb(rbx, Immediate(kIsNotStringMask |
                         kStringRepresentationMask |
                         kShortExternalStringMask));
  __ j(zero, &seq_one_byte_string, Label::kNear);  // Go to (6).

  // (3) Anything but sequential or cons? If yes, go to (7).
  // We check whether the subject string is a cons, since sequential strings
  // have already been covered.
  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
  STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
  STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
  __ cmpp(rbx, Immediate(kExternalStringTag));
  __ j(greater_equal, &not_seq_nor_cons);  // Go to (7).

  // (4) Cons string. Check that it's flat.
  // Replace subject with first string and reload instance type.
  __ CompareRoot(FieldOperand(rdi, ConsString::kSecondOffset),
                 Heap::kempty_stringRootIndex);
  __ j(not_equal, &runtime);
  __ movp(rdi, FieldOperand(rdi, ConsString::kFirstOffset));
  __ bind(&check_underlying);
  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ movp(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));

  // (5a) Is subject sequential two byte? If yes, go to (9).
  __ testb(rbx, Immediate(kStringRepresentationMask | kStringEncodingMask));
  STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);  // Go to (9).
  // (5b) Is subject external? If yes, go to (8).
  __ testb(rbx, Immediate(kStringRepresentationMask));
  // The underlying external string is never a short external string.
  STATIC_ASSERT(ExternalString::kMaxShortLength < ConsString::kMinLength);
  STATIC_ASSERT(ExternalString::kMaxShortLength < SlicedString::kMinLength);
  __ j(not_zero, &external_string);  // Go to (8).

  // (6) One byte sequential. Load regexp code for one byte.
  __ bind(&seq_one_byte_string);
  // rax: RegExp data (FixedArray)
  __ movp(r11, FieldOperand(rax, JSRegExp::kDataOneByteCodeOffset));
  __ Set(rcx, 1);  // Type is one byte.

  // (E) Carry on. String handling is done.
  __ bind(&check_code);
  // r11: irregexp code
  // Check that the irregexp code has been generated for the actual string
  // encoding. If it has, the field contains a code object; otherwise it
  // contains a smi (code flushing support).
  __ JumpIfSmi(r11, &runtime);

  // rdi: sequential subject string (or look-alike, external string)
  // r15: original subject string
  // rcx: encoding of subject string (1 if one_byte, 0 if two_byte);
  // r11: code
  // Load used arguments before starting to push arguments for call to native
  // RegExp code to avoid handling changing stack height.
  // We have to use r15 instead of rdi to load the length because rdi might
  // have been only made to look like a sequential string when it actually
  // is an external string.
  __ movp(rbx, args.GetArgumentOperand(PREVIOUS_INDEX_ARGUMENT_INDEX));
  __ JumpIfNotSmi(rbx, &runtime);
  __ SmiCompare(rbx, FieldOperand(r15, String::kLengthOffset));
  __ j(above_equal, &runtime);
  __ SmiToInteger64(rbx, rbx);

  // rdi: subject string
  // rbx: previous index
  // rcx: encoding of subject string (1 if one_byte, 0 if two_byte);
  // r11: code
  // All checks done. Now push arguments for native regexp code.
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->regexp_entry_native(), 1);

  // Isolates: note we add an additional parameter here (isolate pointer).
  static const int kRegExpExecuteArguments = 9;
  int argument_slots_on_stack =
      masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
  __ EnterApiExitFrame(argument_slots_on_stack);
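  // Note: ArgumentStackSlotsForCFunctionCall is assumed to account for the
  // ABI difference here - on Win64 every argument (including the four
  // register arguments) gets a stack home slot, while on AMD64 System V only
  // the arguments beyond the six register ones need stack slots.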

  // Argument 9: Pass current isolate address.
  __ LoadAddress(kScratchRegister,
                 ExternalReference::isolate_address(isolate()));
  __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kRegisterSize),
          kScratchRegister);

  // Argument 8: Indicate that this is a direct call from JavaScript.
  __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kRegisterSize),
          Immediate(1));

  // Argument 7: Start (high end) of backtracking stack memory area.
  __ Move(kScratchRegister, address_of_regexp_stack_memory_address);
  __ movp(r9, Operand(kScratchRegister, 0));
  __ Move(kScratchRegister, address_of_regexp_stack_memory_size);
  __ addp(r9, Operand(kScratchRegister, 0));
  __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kRegisterSize), r9);

  // Argument 6: Set the number of capture registers to zero to force global
  // regexps to behave as non-global. This does not affect non-global regexps.
  // Argument 6 is passed in r9 on Linux and on the stack on Windows.
#ifdef _WIN64
  __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kRegisterSize),
          Immediate(0));
#else
  __ Set(r9, 0);
#endif

  // Argument 5: static offsets vector buffer.
  __ LoadAddress(
      r8, ExternalReference::address_of_static_offsets_vector(isolate()));
  // Argument 5 passed in r8 on Linux and on the stack on Windows.
#ifdef _WIN64
  __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kRegisterSize), r8);
#endif

  // rdi: subject string
  // rbx: previous index
  // rcx: encoding of subject string (1 if one_byte, 0 if two_byte);
  // r11: code
  // r14: slice offset
  // r15: original subject string

  // Argument 2: Previous index.
  __ movp(arg_reg_2, rbx);

  // Argument 4: End of string data
  // Argument 3: Start of string data
  Label setup_two_byte, setup_rest, got_length, length_not_from_slice;
  // Prepare start and end index of the input.
  // Load the length from the original sliced string if that is the case.
  __ addp(rbx, r14);
  __ SmiToInteger32(arg_reg_3, FieldOperand(r15, String::kLengthOffset));
  __ addp(r14, arg_reg_3);  // Using arg3 as scratch.
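  // Worked example (hypothetical values): for a sliced subject of length 10
  // that starts at offset 3 of its parent, with previous index 2, this gives
  // rbx = 2 + 3 = 5 and r14 = 3 + 10 = 13, i.e. the native code scans the
  // parent's characters in the half-open range [5, 13). For a non-sliced
  // subject r14 started out as 0, so the indices are unchanged.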

  // rbx: start index of the input
  // r14: end index of the input
  // r15: original subject string
  __ testb(rcx, rcx);  // Last use of rcx as encoding of subject string.
  __ j(zero, &setup_two_byte, Label::kNear);
  __ leap(arg_reg_4,
          FieldOperand(rdi, r14, times_1, SeqOneByteString::kHeaderSize));
  __ leap(arg_reg_3,
          FieldOperand(rdi, rbx, times_1, SeqOneByteString::kHeaderSize));
  __ jmp(&setup_rest, Label::kNear);
  __ bind(&setup_two_byte);
  __ leap(arg_reg_4,
          FieldOperand(rdi, r14, times_2, SeqTwoByteString::kHeaderSize));
  __ leap(arg_reg_3,
          FieldOperand(rdi, rbx, times_2, SeqTwoByteString::kHeaderSize));
  __ bind(&setup_rest);

  // Argument 1: Original subject string.
  // Pass the copy saved in r15 rather than rdi: rdi may by now only be made
  // to look like a sequential string (flattened cons, external or sliced
  // string), while the original subject is what must be passed along.
  __ movp(arg_reg_1, r15);

  // Locate the code entry and call it.
  __ addp(r11, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(r11);

  __ LeaveApiExitFrame(true);

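  // The native code reports its outcome via the result codes checked below
  // (see NativeRegExpMacroAssembler): 1 for exactly one match (global
  // behavior was suppressed via argument 6), FAILURE for no match, and
  // EXCEPTION/RETRY for the error cases.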
  // Check the result.
  Label success;
  Label exception;
  __ cmpl(rax, Immediate(1));
  // We expect exactly one result since we force the called regexp to behave
  // as non-global.
  __ j(equal, &success, Label::kNear);
  __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION));
  __ j(equal, &exception);
  __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE));
  // If none of the above, it can only be retry.
  // Handle that in the runtime system.
  __ j(not_equal, &runtime);

  // For failure return null.
  __ LoadRoot(rax, Heap::kNullValueRootIndex);
  __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);

  // Load RegExp data.
  __ bind(&success);
  __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
  __ movp(rcx, FieldOperand(rax, JSRegExp::kDataOffset));
  __ SmiToInteger32(rax,
                    FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2.
  __ leal(rdx, Operand(rax, rax, times_1, 2));
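  // i.e. rdx = rax * 2 + 2. Each capture needs a start and an end offset,
  // plus two more for the implicit capture that spans the whole match; e.g.
  // a regexp with 3 capture groups uses (3 + 1) * 2 = 8 capture registers.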

  // rdx: Number of capture registers
  // Check that the fourth argument is a JSArray object.
  __ movp(r15, args.GetArgumentOperand(LAST_MATCH_INFO_ARGUMENT_INDEX));
  __ JumpIfSmi(r15, &runtime);
  __ CmpObjectType(r15, JS_ARRAY_TYPE, kScratchRegister);
  __ j(not_equal, &runtime);
  // Check that the JSArray is in fast case.
  __ movp(rbx, FieldOperand(r15, JSArray::kElementsOffset));
  __ movp(rax, FieldOperand(rbx, HeapObject::kMapOffset));
  __ CompareRoot(rax, Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &runtime);
  // Check that the last match info has space for the capture registers and the
  // additional information. Ensure no overflow in add.
  STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset);
  __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset));
  __ subl(rax, Immediate(RegExpImpl::kLastMatchOverhead));
  __ cmpl(rdx, rax);
  __ j(greater, &runtime);

  // rbx: last_match_info backing store (FixedArray)
  // rdx: number of capture registers
  // Store the capture count.
  __ Integer32ToSmi(kScratchRegister, rdx);
  __ movp(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset),
          kScratchRegister);
  // Store last subject and last input.
  __ movp(rax, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
  __ movp(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax);
  __ movp(rcx, rax);
  __ RecordWriteField(rbx,
                      RegExpImpl::kLastSubjectOffset,
                      rax,
                      rdi,
                      kDontSaveFPRegs);
  __ movp(rax, rcx);
  __ movp(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax);
  __ RecordWriteField(rbx,
                      RegExpImpl::kLastInputOffset,
                      rax,
                      rdi,
                      kDontSaveFPRegs);

  // Get the static offsets vector filled by the native regexp code.
  __ LoadAddress(
      rcx, ExternalReference::address_of_static_offsets_vector(isolate()));

  // rbx: last_match_info backing store (FixedArray)
  // rcx: offsets vector
  // rdx: number of capture registers
  Label next_capture, done;
  // Capture register counter starts from number of capture registers and
  // counts down until wrapping after zero.
  __ bind(&next_capture);
  __ subp(rdx, Immediate(1));
  __ j(negative, &done, Label::kNear);
  // Read the value from the static offsets vector buffer and make it a smi.
  __ movl(rdi, Operand(rcx, rdx, times_int_size, 0));
  __ Integer32ToSmi(rdi, rdi);
  // Store the smi value in the last match info.
  __ movp(FieldOperand(rbx,
                       rdx,
                       times_pointer_size,
                       RegExpImpl::kFirstCaptureOffset),
          rdi);
  __ jmp(&next_capture);
  __ bind(&done);

  // Return last match info.
  __ movp(rax, r15);
  __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);

  __ bind(&exception);
  // Result must now be exception. If there is no pending exception, a stack
  // overflow (on the backtrack stack) was detected in RegExp code, but the
  // exception has not been created yet. Handle that in the runtime system.
  // TODO(592): Rerunning the RegExp to get the stack overflow exception.
  ExternalReference pending_exception_address(
      Isolate::kPendingExceptionAddress, isolate());
  Operand pending_exception_operand =
      masm->ExternalOperand(pending_exception_address, rbx);
  __ movp(rax, pending_exception_operand);
  __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
  __ cmpp(rax, rdx);
  __ j(equal, &runtime);
  __ movp(pending_exception_operand, rdx);

  __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
  Label termination_exception;
  __ j(equal, &termination_exception, Label::kNear);
  __ Throw(rax);

  __ bind(&termination_exception);
  __ ThrowUncatchable(rax);

  // Do the runtime call to execute the regexp.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kRegExpExecRT, 4, 1);

  // Deferred code for string handling.
  // (7) Not a long external string? If yes, go to (10).
  __ bind(&not_seq_nor_cons);
  // Compare flags are still set from (3).
  __ j(greater, &not_long_external, Label::kNear);  // Go to (10).

  // (8) External string. Short external strings have been ruled out.
  __ bind(&external_string);
  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ testb(rbx, Immediate(kIsIndirectStringMask));
    __ Assert(zero, kExternalStringExpectedButNotFound);
  }
  __ movp(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
  // Move the pointer so that offset-wise, it looks like a sequential string.
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ subp(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  STATIC_ASSERT(kTwoByteStringTag == 0);
  // (8a) Is the external string one byte? If yes, go to (6).
  __ testb(rbx, Immediate(kStringEncodingMask));
  __ j(not_zero, &seq_one_byte_string);  // Go to (6).

  // rdi: subject string (flat two-byte)
  // rax: RegExp data (FixedArray)
  // (9) Two byte sequential. Load regexp code for two byte. Go to (E).
  __ bind(&seq_two_byte_string);
  __ movp(r11, FieldOperand(rax, JSRegExp::kDataUC16CodeOffset));
  __ Set(rcx, 0);  // Type is two byte.
  __ jmp(&check_code);  // Go to (E).

  // (10) Not a string or a short external string? If yes, bail out to runtime.
  __ bind(&not_long_external);
  // Catch non-string subject or short external string.
  STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
  __ testb(rbx, Immediate(kIsNotStringMask | kShortExternalStringMask));
  __ j(not_zero, &runtime);

  // (11) Sliced string. Replace subject with parent. Go to (5a).
  // Load offset into r14 and replace subject string with parent.
  __ SmiToInteger32(r14, FieldOperand(rdi, SlicedString::kOffsetOffset));
  __ movp(rdi, FieldOperand(rdi, SlicedString::kParentOffset));
  __ jmp(&check_underlying);
#endif  // V8_INTERPRETED_REGEXP
}


static int NegativeComparisonResult(Condition cc) {
  DCHECK(cc != equal);
  DCHECK((cc == less) || (cc == less_equal)
      || (cc == greater) || (cc == greater_equal));
  return (cc == greater || cc == greater_equal) ? LESS : GREATER;
}


static void CheckInputType(MacroAssembler* masm, Register input,
                           CompareICState::State expected, Label* fail) {
  Label ok;
  if (expected == CompareICState::SMI) {
    __ JumpIfNotSmi(input, fail);
  } else if (expected == CompareICState::NUMBER) {
    __ JumpIfSmi(input, &ok);
    __ CompareMap(input, masm->isolate()->factory()->heap_number_map());
    __ j(not_equal, fail);
  }
  // We could be strict about internalized/non-internalized here, but as long
  // as hydrogen doesn't care, the stub doesn't have to care either.
  __ bind(&ok);
}

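// Jumps to |label| unless |object| is an internalized string. Internalized
// strings are unique per content, so two of them can be compared for
// equality by pointer identity alone.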
static void BranchIfNotInternalizedString(MacroAssembler* masm,
                                          Label* label,
                                          Register object,
                                          Register scratch) {
  __ JumpIfSmi(object, label);
  __ movp(scratch, FieldOperand(object, HeapObject::kMapOffset));
  __ movzxbp(scratch,
             FieldOperand(scratch, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ testb(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, label);
}


void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
  Label check_unequal_objects, done;
  Condition cc = GetCondition();
  Factory* factory = isolate()->factory();

  Label miss;
  CheckInputType(masm, rdx, left(), &miss);
  CheckInputType(masm, rax, right(), &miss);

  // Compare two smis.
  Label non_smi, smi_done;
  __ JumpIfNotBothSmi(rax, rdx, &non_smi);
  __ subp(rdx, rax);
  __ j(no_overflow, &smi_done);
  __ notp(rdx);  // Correct sign in case of overflow. rdx cannot be 0 here.
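  // Overflow means the sign bit of rdx is the opposite of the true
  // difference's sign; since the true difference is non-zero here, flipping
  // every bit with NOT restores the correct sign (the exact magnitude is
  // irrelevant for the comparison result).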
  __ bind(&smi_done);
  __ movp(rax, rdx);
  __ ret(0);
  __ bind(&non_smi);

  // The compare stub returns a positive, negative, or zero 64-bit integer
  // value in rax, corresponding to result of comparing the two inputs.
  // NOTICE! This code is only reached after a smi-fast-case check, so
  // it is certain that at least one operand isn't a smi.

  // Two identical objects are equal unless they are both NaN or undefined.
  {
    Label not_identical;
    __ cmpp(rax, rdx);
    __ j(not_equal, &not_identical, Label::kNear);

    if (cc != equal) {
      // Check for undefined. undefined OP undefined is false even though
      // undefined == undefined.
      Label check_for_nan;
      __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
      __ j(not_equal, &check_for_nan, Label::kNear);
      __ Set(rax, NegativeComparisonResult(cc));
      __ ret(0);
      __ bind(&check_for_nan);
    }

    // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
    // so we do the second best thing - test it ourselves.
    Label heap_number;
    // If it's not a heap number, then return equal for (in)equality operator.
    __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
           factory->heap_number_map());
    __ j(equal, &heap_number, Label::kNear);
    if (cc != equal) {
      // Call runtime on identical objects. Otherwise return equal.
      __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
      __ j(above_equal, &not_identical, Label::kNear);
    }
    __ Set(rax, EQUAL);
    __ ret(0);

    __ bind(&heap_number);
    // It is a heap number, so return equal if it's not NaN.
    // For NaN, return 1 for every condition except greater and
    // greater-equal. Return -1 for them, so the comparison yields
    // false for all conditions except not-equal.
    __ Set(rax, EQUAL);
    __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
    __ ucomisd(xmm0, xmm0);
    __ setcc(parity_even, rax);
    // rax is 0 for equal non-NaN heapnumbers, 1 for NaNs.
    if (cc == greater_equal || cc == greater) {
      __ negp(rax);
    }
    __ ret(0);

    __ bind(&not_identical);
  }

  if (cc == equal) {  // Both strict and non-strict.
    Label slow;  // Fallthrough label.

    // If we're doing a strict equality comparison, we don't have to do
    // type conversion, so we generate code to do fast comparison for objects
    // and oddballs. Non-smi numbers and strings still go through the usual
    // slow-case code.
    if (strict()) {
      // If either is a Smi (we know that not both are), then they can only
      // be equal if the other is a HeapNumber. If so, use the slow case.
      {
        Label not_smis;
        __ SelectNonSmi(rbx, rax, rdx, &not_smis);

        // Check if the non-smi operand is a heap number.
        __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
               factory->heap_number_map());
        // If heap number, handle it in the slow case.
        __ j(equal, &slow);
        // Return non-equal. ebx (the lower half of rbx) is not zero.
        __ movp(rax, rbx);
        __ ret(0);

        __ bind(&not_smis);
      }

      // If either operand is a JSObject or an oddball value, then they are not
      // equal since their pointers are different.
      // There is no test for undetectability in strict equality.

      // If the first object is a JS object, we have done pointer comparison.
      STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
      Label first_non_object;
      __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
      __ j(below, &first_non_object, Label::kNear);
      // Return non-zero (eax (not rax) is not zero).
      Label return_not_equal;
      STATIC_ASSERT(kHeapObjectTag != 0);
      __ bind(&return_not_equal);
      __ ret(0);

      __ bind(&first_non_object);
      // Check for oddballs: true, false, null, undefined.
      __ CmpInstanceType(rcx, ODDBALL_TYPE);
      __ j(equal, &return_not_equal);

      __ CmpObjectType(rdx, FIRST_SPEC_OBJECT_TYPE, rcx);
      __ j(above_equal, &return_not_equal);

      // Check for oddballs: true, false, null, undefined.
      __ CmpInstanceType(rcx, ODDBALL_TYPE);
      __ j(equal, &return_not_equal);

      // Fall through to the general case.
    }
    __ bind(&slow);
  }

  // Generate the number comparison code.
  Label non_number_comparison;
  Label unordered;
  FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison);
  __ xorl(rax, rax);
  __ xorl(rcx, rcx);
  __ ucomisd(xmm0, xmm1);

  // Don't base result on EFLAGS when a NaN is involved.
  __ j(parity_even, &unordered, Label::kNear);
  // Return a result of -1, 0, or 1, based on EFLAGS.
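  // (setcc materializes a condition flag as 0 or 1, so rax - rcx below is
  // 1 when xmm0 > xmm1, -1 when xmm0 < xmm1 and 0 when they are equal - a
  // branch-free three-way result.)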
  __ setcc(above, rax);
  __ setcc(below, rcx);
  __ subp(rax, rcx);
  __ ret(0);

  // If one of the numbers was NaN, then the result is always false.
  // The cc is never not-equal.
  __ bind(&unordered);
  DCHECK(cc != not_equal);
  if (cc == less || cc == less_equal) {
    __ Set(rax, 1);
  } else {
    __ Set(rax, -1);
  }
  __ ret(0);

  // The number comparison code did not provide a valid result.
  __ bind(&non_number_comparison);

  // Fast negative check for internalized-to-internalized equality.
  Label check_for_strings;
  if (cc == equal) {
    BranchIfNotInternalizedString(
        masm, &check_for_strings, rax, kScratchRegister);
    BranchIfNotInternalizedString(
        masm, &check_for_strings, rdx, kScratchRegister);

    // We've already checked for object identity, so if both operands are
    // internalized strings they aren't equal. Register eax (not rax) already
    // holds a non-zero value, which indicates not equal, so just return.
    __ ret(0);
  }

  __ bind(&check_for_strings);

  __ JumpIfNotBothSequentialOneByteStrings(rdx, rax, rcx, rbx,
                                           &check_unequal_objects);

  // Inline comparison of one-byte strings.
  if (cc == equal) {
    StringHelper::GenerateFlatOneByteStringEquals(masm, rdx, rax, rcx, rbx);
  } else {
    StringHelper::GenerateCompareFlatOneByteStrings(masm, rdx, rax, rcx, rbx,
                                                    rdi, r8);
  }

#ifdef DEBUG
  __ Abort(kUnexpectedFallThroughFromStringComparison);
#endif

  __ bind(&check_unequal_objects);
  if (cc == equal && !strict()) {
    // Not strict equality. Objects are unequal if
    // they are both JSObjects and not undetectable,
    // and their pointers are different.
    Label not_both_objects, return_unequal;
    // At most one is a smi, so we can test for smi by adding the two.
    // A smi plus a heap object has the low bit set, a heap object plus
    // a heap object has the low bit clear.
    STATIC_ASSERT(kSmiTag == 0);
    STATIC_ASSERT(kSmiTagMask == 1);
    __ leap(rcx, Operand(rax, rdx, times_1, 0));
    __ testb(rcx, Immediate(kSmiTagMask));
    __ j(not_zero, &not_both_objects, Label::kNear);
    __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rbx);
    __ j(below, &not_both_objects, Label::kNear);
    __ CmpObjectType(rdx, FIRST_SPEC_OBJECT_TYPE, rcx);
    __ j(below, &not_both_objects, Label::kNear);
    __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    __ j(zero, &return_unequal, Label::kNear);
    __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    __ j(zero, &return_unequal, Label::kNear);
    // The objects are both undetectable, so they both compare as the value
    // undefined, and are equal.
    __ Set(rax, EQUAL);
    __ bind(&return_unequal);
    // Return non-equal by returning the non-zero object pointer in rax,
    // or return equal if we fell through to here.
    __ ret(0);
    __ bind(&not_both_objects);
  }

  // Push arguments below the return address to prepare jump to builtin.
  __ PopReturnAddressTo(rcx);
  __ Push(rdx);
  __ Push(rax);

  // Figure out which native to call and setup the arguments.
  Builtins::JavaScript builtin;
  if (cc == equal) {
    builtin = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
  } else {
    builtin = Builtins::COMPARE;
    __ Push(Smi::FromInt(NegativeComparisonResult(cc)));
  }

  __ PushReturnAddressFrom(rcx);

  // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ InvokeBuiltin(builtin, JUMP_FUNCTION);

  __ bind(&miss);
  GenerateMiss(masm);
}


static void GenerateRecordCallTarget(MacroAssembler* masm) {
  // Cache the called function in a feedback vector slot. Cache states
  // are uninitialized, monomorphic (indicated by a JSFunction), and
  // megamorphic.
  // rax : number of arguments to the construct function
  // rbx : feedback vector
  // rdx : slot in feedback vector (Smi)
  // rdi : the function to call
  Isolate* isolate = masm->isolate();
  Label initialize, done, miss, megamorphic, not_array_function,
      done_no_smi_convert;

  // Load the cache state into rcx.
  __ SmiToInteger32(rdx, rdx);
  __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size,
                            FixedArray::kHeaderSize));

  // A monomorphic cache hit or an already megamorphic state: invoke the
  // function without changing the state.
  __ cmpp(rcx, rdi);
  __ j(equal, &done);
  __ Cmp(rcx, TypeFeedbackVector::MegamorphicSentinel(isolate));
  __ j(equal, &done);

  if (!FLAG_pretenuring_call_new) {
    // If we came here, we need to see if we are the array function.
    // If we didn't have a matching function, and we didn't find the
    // megamorphic sentinel, then we have in the slot either some other
    // function or an AllocationSite. Do a map check on the object in rcx.
    Handle<Map> allocation_site_map =
        masm->isolate()->factory()->allocation_site_map();
    __ Cmp(FieldOperand(rcx, 0), allocation_site_map);
    __ j(not_equal, &miss);

    // Make sure the function is the Array() function.
    __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx);
    __ cmpp(rdi, rcx);
    __ j(not_equal, &megamorphic);
    __ jmp(&done);
  }

  __ bind(&miss);

  // A monomorphic miss (i.e., here the cache is not uninitialized) goes
  // megamorphic.
  __ Cmp(rcx, TypeFeedbackVector::UninitializedSentinel(isolate));
  __ j(equal, &initialize);
  // MegamorphicSentinel is an immortal immovable object (undefined) so no
  // write-barrier is needed.
  __ bind(&megamorphic);
  __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
          TypeFeedbackVector::MegamorphicSentinel(isolate));
  __ jmp(&done);

  // An uninitialized cache is patched with the function or sentinel to
  // indicate the ElementsKind if function is the Array constructor.
  __ bind(&initialize);

  if (!FLAG_pretenuring_call_new) {
    // Make sure the function is the Array() function.
    __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx);
    __ cmpp(rdi, rcx);
    __ j(not_equal, &not_array_function);

    {
      FrameScope scope(masm, StackFrame::INTERNAL);

      // Arguments register must be smi-tagged to call out.
      __ Integer32ToSmi(rax, rax);
      __ Push(rax);
      __ Push(rdi);
      __ Integer32ToSmi(rdx, rdx);
      __ Push(rdx);
      __ Push(rbx);

      CreateAllocationSiteStub create_stub(isolate);
      __ CallStub(&create_stub);

      __ Pop(rbx);
      __ Pop(rdx);
      __ Pop(rdi);
      __ Pop(rax);
      __ SmiToInteger32(rax, rax);
    }
    __ jmp(&done_no_smi_convert);

    __ bind(&not_array_function);
  }

  __ movp(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
          rdi);

  // We won't need rdx or rbx anymore, just save rdi.
  __ Push(rdi);
  __ Push(rbx);
  __ Push(rdx);
  __ RecordWriteArray(rbx, rdi, rdx, kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  __ Pop(rdx);
  __ Pop(rbx);
  __ Pop(rdi);

  __ bind(&done);
  __ Integer32ToSmi(rdx, rdx);

  __ bind(&done_no_smi_convert);
}


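// Jumps to |cont| when the receiver can be passed to the callee unchanged
// (the function is strict mode or native); falls through when the receiver
// still needs to be wrapped into an object.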
static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) {
  // Do not transform the receiver for strict mode functions.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ testb(FieldOperand(rcx, SharedFunctionInfo::kStrictModeByteOffset),
           Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
  __ j(not_equal, cont);

  // Do not transform the receiver for natives.
  // SharedFunctionInfo is already loaded into rcx.
  __ testb(FieldOperand(rcx, SharedFunctionInfo::kNativeByteOffset),
           Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
  __ j(not_equal, cont);
}


static void EmitSlowCase(Isolate* isolate,
                         MacroAssembler* masm,
                         StackArgumentsAccessor* args,
                         int argc,
                         Label* non_function) {
  // Check for function proxy.
  __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
  __ j(not_equal, non_function);
  __ PopReturnAddressTo(rcx);
  __ Push(rdi);  // Put proxy as additional argument under return address.
  __ PushReturnAddressFrom(rcx);
  __ Set(rax, argc + 1);
  __ Set(rbx, 0);
  __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
  {
    Handle<Code> adaptor =
        masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
    __ jmp(adaptor, RelocInfo::CODE_TARGET);
  }

  // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
  // of the original receiver from the call site).
  __ bind(non_function);
  __ movp(args->GetReceiverOperand(), rdi);
  __ Set(rax, argc);
  __ Set(rbx, 0);
  __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
  Handle<Code> adaptor =
      isolate->builtins()->ArgumentsAdaptorTrampoline();
  __ Jump(adaptor, RelocInfo::CODE_TARGET);
}


static void EmitWrapCase(MacroAssembler* masm,
                         StackArgumentsAccessor* args,
                         Label* cont) {
  // Wrap the receiver and patch it back onto the stack.
  { FrameScope frame_scope(masm, StackFrame::INTERNAL);
    __ Push(rdi);
    __ Push(rax);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ Pop(rdi);
  }
  __ movp(args->GetReceiverOperand(), rax);
  __ jmp(cont);
}


static void CallFunctionNoFeedback(MacroAssembler* masm,
                                   int argc, bool needs_checks,
                                   bool call_as_method) {
  // rdi : the function to call

  // call_as_method can only be true if we are compiling a monomorphic method.
  Isolate* isolate = masm->isolate();
  Label slow, non_function, wrap, cont;
  StackArgumentsAccessor args(rsp, argc);

  if (needs_checks) {
    // Check that the function really is a JavaScript function.
    __ JumpIfSmi(rdi, &non_function);

    // Goto slow case if we do not have a function.
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
    __ j(not_equal, &slow);
  }

  // Fast-case: Just invoke the function.
  ParameterCount actual(argc);

  if (call_as_method) {
    if (needs_checks) {
      EmitContinueIfStrictOrNative(masm, &cont);
    }

    // Load the receiver from the stack.
    __ movp(rax, args.GetReceiverOperand());

    if (needs_checks) {
      __ JumpIfSmi(rax, &wrap);

      __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
      __ j(below, &wrap);
    } else {
      __ jmp(&wrap);
    }

    __ bind(&cont);
  }

  __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper());

  if (needs_checks) {
    // Slow-case: Non-function called.
    __ bind(&slow);
    EmitSlowCase(isolate, masm, &args, argc, &non_function);
  }

  if (call_as_method) {
    __ bind(&wrap);
    EmitWrapCase(masm, &args, &cont);
  }
}


void CallFunctionStub::Generate(MacroAssembler* masm) {
  CallFunctionNoFeedback(masm, argc(), NeedsChecks(), CallAsMethod());
}


void CallConstructStub::Generate(MacroAssembler* masm) {
  // rax : number of arguments
  // rbx : feedback vector
  // rdx : (only if rbx is not the megamorphic symbol) slot in feedback
  //       vector (Smi)
  // rdi : constructor function
  Label slow, non_function_call;

  // Check that function is not a smi.
  __ JumpIfSmi(rdi, &non_function_call);
  // Check that function is a JSFunction.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &slow);

  if (RecordCallTarget()) {
    GenerateRecordCallTarget(masm);

    __ SmiToInteger32(rdx, rdx);
    if (FLAG_pretenuring_call_new) {
      // Put the AllocationSite from the feedback vector into rbx.
      // By adding kPointerSize we encode that we know the AllocationSite
      // entry is at the feedback vector slot given by rdx + 1.
      __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
                                FixedArray::kHeaderSize + kPointerSize));
    } else {
      Label feedback_register_initialized;
      // Put the AllocationSite from the feedback vector into rbx, or
      // undefined.
      __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
                                FixedArray::kHeaderSize));
      __ CompareRoot(FieldOperand(rbx, 0), Heap::kAllocationSiteMapRootIndex);
      __ j(equal, &feedback_register_initialized);
      __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
      __ bind(&feedback_register_initialized);
    }

    __ AssertUndefinedOrAllocationSite(rbx);
  }

  // Jump to the function-specific construct stub.
  Register jmp_reg = rcx;
  __ movp(jmp_reg, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(jmp_reg, FieldOperand(jmp_reg,
                                SharedFunctionInfo::kConstructStubOffset));
  __ leap(jmp_reg, FieldOperand(jmp_reg, Code::kHeaderSize));
  __ jmp(jmp_reg);

  // rdi: called object
  // rax: number of arguments
  // rcx: object map
  Label do_call;
  __ bind(&slow);
  __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
  __ j(not_equal, &non_function_call);
  __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
  __ jmp(&do_call);

  __ bind(&non_function_call);
  __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
  __ bind(&do_call);
  // Set expected number of arguments to zero (not changing rax).
  __ Set(rbx, 0);
  __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);
}


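// Loads the current function's type feedback vector into |vector| by walking
// frame slot -> JSFunction -> SharedFunctionInfo -> feedback vector.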
static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
  __ movp(vector, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ movp(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset));
  __ movp(vector, FieldOperand(vector,
                               SharedFunctionInfo::kFeedbackVectorOffset));
}


void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
  // rdi - function
  // rdx - slot id (as integer)
  Label miss;
  int argc = arg_count();
  ParameterCount actual(argc);

  EmitLoadTypeFeedbackVector(masm, rbx);
  __ SmiToInteger32(rdx, rdx);

  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx);
  __ cmpp(rdi, rcx);
  __ j(not_equal, &miss);

  __ movp(rax, Immediate(arg_count()));
  __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size,
                            FixedArray::kHeaderSize));
  // Verify that rcx contains an AllocationSite.
  Factory* factory = masm->isolate()->factory();
  __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset),
         factory->allocation_site_map());
  __ j(not_equal, &miss);

  __ movp(rbx, rcx);
  ArrayConstructorStub stub(masm->isolate(), arg_count());
  __ TailCallStub(&stub);

  __ bind(&miss);
  GenerateMiss(masm);

  // The slow case: we need this no matter what to complete a call after a
  // miss.
  CallFunctionNoFeedback(masm,
                         arg_count(),
                         true,
                         CallAsMethod());

  // Unreachable.
  __ int3();
}


void CallICStub::Generate(MacroAssembler* masm) {
  // rdi - function
  // rdx - slot id
  Isolate* isolate = masm->isolate();
  const int with_types_offset =
      FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex);
  const int generic_offset =
      FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex);
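  // Offsets of the profiling counters kept inside the feedback vector; they
  // are updated below when this IC transitions to the monomorphic or
  // megamorphic state.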
  Label extra_checks_or_miss, slow_start;
  Label slow, non_function, wrap, cont;
  Label have_js_function;
  int argc = arg_count();
  StackArgumentsAccessor args(rsp, argc);
  ParameterCount actual(argc);

  EmitLoadTypeFeedbackVector(masm, rbx);

  // The checks. First, does rdi match the recorded monomorphic target?
  __ SmiToInteger32(rdx, rdx);
  __ cmpp(rdi, FieldOperand(rbx, rdx, times_pointer_size,
                            FixedArray::kHeaderSize));
  __ j(not_equal, &extra_checks_or_miss);

  __ bind(&have_js_function);
  if (CallAsMethod()) {
    EmitContinueIfStrictOrNative(masm, &cont);

    // Load the receiver from the stack.
    __ movp(rax, args.GetReceiverOperand());

    __ JumpIfSmi(rax, &wrap);

    __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
    __ j(below, &wrap);

    __ bind(&cont);
  }

  __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper());

  __ bind(&slow);
  EmitSlowCase(isolate, masm, &args, argc, &non_function);

  if (CallAsMethod()) {
    __ bind(&wrap);
    EmitWrapCase(masm, &args, &cont);
  }

  __ bind(&extra_checks_or_miss);
  Label uninitialized, miss;

  __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size,
                            FixedArray::kHeaderSize));
  __ Cmp(rcx, TypeFeedbackVector::MegamorphicSentinel(isolate));
  __ j(equal, &slow_start);

  // The following cases attempt to handle MISS cases without going to the
  // runtime.
  if (FLAG_trace_ic) {
    __ jmp(&miss);
  }

  __ Cmp(rcx, TypeFeedbackVector::UninitializedSentinel(isolate));
  __ j(equal, &uninitialized);

  // We are going megamorphic. If the feedback is a JSFunction, it is fine
  // to handle it here. More complex cases are dealt with in the runtime.
  __ AssertNotSmi(rcx);
  __ CmpObjectType(rcx, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &miss);
  __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
          TypeFeedbackVector::MegamorphicSentinel(isolate));
  // We have to update statistics for runtime profiling.
  __ SmiAddConstant(FieldOperand(rbx, with_types_offset), Smi::FromInt(-1));
  __ SmiAddConstant(FieldOperand(rbx, generic_offset), Smi::FromInt(1));
  __ jmp(&slow_start);

  __ bind(&uninitialized);

  // We are going monomorphic, provided we actually have a JSFunction.
  __ JumpIfSmi(rdi, &miss);

  // Goto miss case if we do not have a function.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &miss);

  // Make sure the function is not the Array() function, which requires special
  // behavior on MISS.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx);
  __ cmpp(rdi, rcx);
  __ j(equal, &miss);

  // Update stats.
  __ SmiAddConstant(FieldOperand(rbx, with_types_offset), Smi::FromInt(1));

  // Store the function.
  __ movp(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
          rdi);

  // Update the write barrier.
  __ movp(rax, rdi);
  __ RecordWriteArray(rbx, rax, rdx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ jmp(&have_js_function);

  // We are here because tracing is on or we encountered a MISS case we can't
  // handle here.
  __ bind(&miss);
  GenerateMiss(masm);

  // The slow case.
  __ bind(&slow_start);
  // Check that function is not a smi.
  __ JumpIfSmi(rdi, &non_function);
  // Check that function is a JSFunction.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &slow);
  __ jmp(&have_js_function);

  // Unreachable.
  __ int3();
}


void CallICStub::GenerateMiss(MacroAssembler* masm) {
  // Get the receiver of the function from the stack; 1 ~ return address.
  __ movp(rcx, Operand(rsp, (arg_count() + 1) * kPointerSize));

  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push the receiver and the function and feedback info.
    __ Push(rcx);
    __ Push(rdi);
    __ Push(rbx);
    __ Integer32ToSmi(rdx, rdx);
    __ Push(rdx);

    // Call the entry.
    IC::UtilityId id = GetICState() == DEFAULT ? IC::kCallIC_Miss
                                               : IC::kCallIC_Customization_Miss;

    ExternalReference miss = ExternalReference(IC_Utility(id),
                                               masm->isolate());
    __ CallExternalReference(miss, 4);

    // Move result to rdi and exit the internal frame.
    __ movp(rdi, rax);
  }
}


bool CEntryStub::NeedsImmovableCode() {
  return false;
}


void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  CEntryStub::GenerateAheadOfTime(isolate);
  StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
  StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
  // It is important that the store buffer overflow stubs are generated first.
  ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
  CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
  BinaryOpICStub::GenerateAheadOfTime(isolate);
  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
}


void CodeStub::GenerateFPStubs(Isolate* isolate) {
}


void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
  CEntryStub stub(isolate, 1, kDontSaveFPRegs);
  stub.GetCode();
  CEntryStub save_doubles(isolate, 1, kSaveFPRegs);
  save_doubles.GetCode();
}
2276
2277
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002278void CEntryStub::Generate(MacroAssembler* masm) {
2279 // rax: number of arguments including receiver
2280 // rbx: pointer to C function (C callee-saved)
2281 // rbp: frame pointer of calling JS frame (restored after C call)
2282 // rsp: stack pointer (restored after C call)
2283 // rsi: current context (restored)
2284
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002285 ProfileEntryHookStub::MaybeCallEntryHook(masm);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002286
2287 // Enter the exit frame that transitions from JavaScript to C++.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08002288#ifdef _WIN64
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002289 int arg_stack_space = (result_size() < 2 ? 2 : 4);
2290#else // _WIN64
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08002291 int arg_stack_space = 0;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002292#endif // _WIN64
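  // On Win64 a two-pointer return value does not fit in registers, so the
  // extra exit-frame slots reserved above include space for the result, and
  // a pointer to that space is passed as a hidden first argument (see the
  // result handling below). The System V AMD64 ABI returns such a pair in
  // rax:rdx instead.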
2293 __ EnterExitFrame(arg_stack_space, save_doubles());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002294
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002295 // rbx: pointer to builtin function (C callee-saved).
2296 // rbp: frame pointer of exit frame (restored after C call).
2297 // rsp: stack pointer (restored after C call).
2298 // r14: number of arguments including receiver (C callee-saved).
Steve Block44f0eee2011-05-26 01:26:41 +01002299 // r15: argv pointer (C callee-saved).
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002300
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002301 // Simple results returned in rax (both AMD64 and Win64 calling conventions).
2302 // Complex results must be written to the address passed as the first argument.
2303 // AMD64 calling convention: a struct of two pointers is returned in rax:rdx.
2304
2305 // Check stack alignment.
2306 if (FLAG_debug_code) {
2307 __ CheckStackAlignment();
2308 }
2309
2310 // Call C function.
2311#ifdef _WIN64
2312 // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9.
2313 // Pass argv and argc as two parameters. The arguments object will
2314 // be created by stubs declared by DECLARE_RUNTIME_FUNCTION().
2315 if (result_size() < 2) {
2316 // Pass a pointer to the Arguments object as the first argument.
2317 // Return result in single register (rax).
2318 __ movp(rcx, r14); // argc.
2319 __ movp(rdx, r15); // argv.
2320 __ Move(r8, ExternalReference::isolate_address(isolate()));
2321 } else {
2322 DCHECK_EQ(2, result_size());
2323 // Pass a pointer to the result location as the first argument.
2324 __ leap(rcx, StackSpaceOperand(2));
2325 // Pass a pointer to the Arguments object as the second argument.
2326 __ movp(rdx, r14); // argc.
2327 __ movp(r8, r15); // argv.
2328 __ Move(r9, ExternalReference::isolate_address(isolate()));
2329 }
2330
2331#else // _WIN64
2332 // GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9.
2333 __ movp(rdi, r14); // argc.
2334 __ movp(rsi, r15); // argv.
2335 __ Move(rdx, ExternalReference::isolate_address(isolate()));
2336#endif // _WIN64
2337 __ call(rbx);
2338 // Result is in rax - do not destroy this register!
2339
2340#ifdef _WIN64
2341 // If return value is on the stack, pop it to registers.
2342 if (result_size() > 1) {
2343 DCHECK_EQ(2, result_size());
2344 // Read result values stored on stack. Result is stored
2345 // above the four argument mirror slots and the two
2346 // Arguments object slots.
2347 __ movq(rax, Operand(rsp, 6 * kRegisterSize));
2348 __ movq(rdx, Operand(rsp, 7 * kRegisterSize));
2349 }
2350#endif // _WIN64
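  // Sketch of the C++ shape behind a two-word runtime result (assuming a
  // plain two-pointer struct; V8 uses an ObjectPair-style type for this):
  //   struct ObjectPair { Object* first; Object* second; };
  //   ObjectPair Runtime_SomePairReturningFunction(Arguments args);
  // On Win64 the pair is written through the hidden result pointer and read
  // back above; on AMD64 it arrives directly in rax:rdx.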
2351
2352 // Runtime functions should not return 'the hole'. Allowing it to escape may
2353 // lead to crashes in the IC code later.
2354 if (FLAG_debug_code) {
2355 Label okay;
2356 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
2357 __ j(not_equal, &okay, Label::kNear);
2358 __ int3();
2359 __ bind(&okay);
2360 }
2361
2362 // Check result for exception sentinel.
2363 Label exception_returned;
2364 __ CompareRoot(rax, Heap::kExceptionRootIndex);
2365 __ j(equal, &exception_returned);
2366
2367 ExternalReference pending_exception_address(
2368 Isolate::kPendingExceptionAddress, isolate());
2369
2370 // Check that there is no pending exception, otherwise we
2371 // should have returned the exception sentinel.
2372 if (FLAG_debug_code) {
2373 Label okay;
2374 __ LoadRoot(r14, Heap::kTheHoleValueRootIndex);
2375 Operand pending_exception_operand =
2376 masm->ExternalOperand(pending_exception_address);
2377 __ cmpp(r14, pending_exception_operand);
2378 __ j(equal, &okay, Label::kNear);
2379 __ int3();
2380 __ bind(&okay);
2381 }
2382
2383 // Exit the JavaScript to C++ exit frame.
2384 __ LeaveExitFrame(save_doubles());
2385 __ ret(0);
2386
2387 // Handling of exception.
2388 __ bind(&exception_returned);
2389
2390 // Retrieve the pending exception.
2391 Operand pending_exception_operand =
2392 masm->ExternalOperand(pending_exception_address);
2393 __ movp(rax, pending_exception_operand);
2394
2395 // Clear the pending exception.
2396 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
2397 __ movp(pending_exception_operand, rdx);
2398
2399 // Special handling of termination exceptions, which are uncatchable
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002400 // by JavaScript code.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002401 Label throw_termination_exception;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002402 __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
2403 __ j(equal, &throw_termination_exception);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002404
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002405 // Handle normal exception.
2406 __ Throw(rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002407
2408 __ bind(&throw_termination_exception);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002409 __ ThrowUncatchable(rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002410}
2411
2412
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002413void JSEntryStub::Generate(MacroAssembler* masm) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002414 Label invoke, handler_entry, exit;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002415 Label not_outermost_js, not_outermost_js_2;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002416
2417 ProfileEntryHookStub::MaybeCallEntryHook(masm);
2418
Steve Block44f0eee2011-05-26 01:26:41 +01002419 { // NOLINT. Scope block confuses linter.
2420 MacroAssembler::NoRootArrayScope uninitialized_root_register(masm);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002421 // Set up frame.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002422 __ pushq(rbp);
2423 __ movp(rbp, rsp);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002424
Steve Block44f0eee2011-05-26 01:26:41 +01002425 // Push the stack frame type marker twice.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002426 int marker = type();
Steve Block44f0eee2011-05-26 01:26:41 +01002427 // Scratch register is neither callee-saved nor an argument register on any
2428 // platform. It's free to use at this point.
2429 // Cannot use smi-register for loading yet.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002430 __ Move(kScratchRegister, Smi::FromInt(marker), Assembler::RelocInfoNone());
2431 __ Push(kScratchRegister); // context slot
2432 __ Push(kScratchRegister); // function slot
2433 // Save callee-saved registers (X64/X32/Win64 calling conventions).
2434 __ pushq(r12);
2435 __ pushq(r13);
2436 __ pushq(r14);
2437 __ pushq(r15);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002438#ifdef _WIN64
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002439 __ pushq(rdi); // Callee-saved only in Win64 ABI; argument in AMD64 ABI.
2440 __ pushq(rsi); // Callee-saved only in Win64 ABI; argument in AMD64 ABI.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002441#endif
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002442 __ pushq(rbx);
2443
2444#ifdef _WIN64
2445 // On Win64, XMM6-XMM15 are callee-saved.
2446 __ subp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
2447 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0), xmm6);
2448 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1), xmm7);
2449 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2), xmm8);
2450 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3), xmm9);
2451 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4), xmm10);
2452 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5), xmm11);
2453 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6), xmm12);
2454 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7), xmm13);
2455 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14);
2456 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15);
2457#endif
Steve Block44f0eee2011-05-26 01:26:41 +01002458
2459 // Set up the roots and smi constant registers.
2460 // Needs to be done before any further smi loads.
2461 __ InitializeSmiConstantRegister();
2462 __ InitializeRootRegister();
2463 }
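  // At this point the entry frame looks like this (sketch, in push order):
  // saved rbp, frame-type marker (pushed twice), r12-r15, rdi/rsi on Win64,
  // rbx, and on Win64 the xmm6-xmm15 spill area. It is torn down in reverse
  // order on the way out below.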
2464
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002465 // Save copies of the top frame descriptor on the stack.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002466 ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
Steve Block44f0eee2011-05-26 01:26:41 +01002467 {
2468 Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002469 __ Push(c_entry_fp_operand);
Steve Block44f0eee2011-05-26 01:26:41 +01002470 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002471
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002472 // If this is the outermost JS call, set js_entry_sp value.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002473 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
Steve Block44f0eee2011-05-26 01:26:41 +01002474 __ Load(rax, js_entry_sp);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002475 __ testp(rax, rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002476 __ j(not_zero, &not_outermost_js);
Steve Block053d10c2011-06-13 19:13:29 +01002477 __ Push(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002478 __ movp(rax, rbp);
Steve Block44f0eee2011-05-26 01:26:41 +01002479 __ Store(js_entry_sp, rax);
Steve Block053d10c2011-06-13 19:13:29 +01002480 Label cont;
2481 __ jmp(&cont);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002482 __ bind(&not_outermost_js);
Steve Block053d10c2011-06-13 19:13:29 +01002483 __ Push(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME));
2484 __ bind(&cont);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002485
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002486 // Jump to a faked try block that does the invoke, with a faked catch
2487 // block that sets the pending exception.
2488 __ jmp(&invoke);
2489 __ bind(&handler_entry);
2490 handler_offset_ = handler_entry.pos();
2491 // Caught exception: Store result (exception) in the pending exception
2492 // field in the JSEnv and return a failure sentinel.
Ben Murdoch589d6972011-11-30 16:04:58 +00002493 ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002494 isolate());
Steve Block44f0eee2011-05-26 01:26:41 +01002495 __ Store(pending_exception, rax);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002496 __ LoadRoot(rax, Heap::kExceptionRootIndex);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002497 __ jmp(&exit);
2498
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002499 // Invoke: Link this frame into the handler chain. There's only one
2500 // handler block in this code object, so its index is 0.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002501 __ bind(&invoke);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002502 __ PushTryHandler(StackHandler::JS_ENTRY, 0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002503
2504 // Clear any pending exceptions.
Steve Block44f0eee2011-05-26 01:26:41 +01002505 __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
2506 __ Store(pending_exception, rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002507
2508 // Fake a receiver (NULL).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002509 __ Push(Immediate(0)); // receiver
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002510
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002511 // Invoke the function by calling through JS entry trampoline builtin and
2512 // pop the faked function when we return. We load the address from an
2513 // external reference instead of inlining the call target address directly
2514 // in the code, because the builtin stubs may not have been generated yet
2515 // at the time this code is generated.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002516 if (type() == StackFrame::ENTRY_CONSTRUCT) {
Steve Block44f0eee2011-05-26 01:26:41 +01002517 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002518 isolate());
Steve Block44f0eee2011-05-26 01:26:41 +01002519 __ Load(rax, construct_entry);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002520 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002521 ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
Steve Block44f0eee2011-05-26 01:26:41 +01002522 __ Load(rax, entry);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002523 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002524 __ leap(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002525 __ call(kScratchRegister);
2526
2527 // Unlink this frame from the handler chain.
Steve Block053d10c2011-06-13 19:13:29 +01002528 __ PopTryHandler();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002529
Steve Block053d10c2011-06-13 19:13:29 +01002530 __ bind(&exit);
Steve Block053d10c2011-06-13 19:13:29 +01002531 // Check if the current stack frame is marked as the outermost JS frame.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002532 __ Pop(rbx);
Steve Block053d10c2011-06-13 19:13:29 +01002533 __ Cmp(rbx, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002534 __ j(not_equal, &not_outermost_js_2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002535 __ Move(kScratchRegister, js_entry_sp);
2536 __ movp(Operand(kScratchRegister, 0), Immediate(0));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002537 __ bind(&not_outermost_js_2);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002538
2539 // Restore the top frame descriptor from the stack.
Steve Block053d10c2011-06-13 19:13:29 +01002540 { Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002541 __ Pop(c_entry_fp_operand);
Steve Block44f0eee2011-05-26 01:26:41 +01002542 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002543
2544 // Restore callee-saved registers (X64 conventions).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002545#ifdef _WIN64
2546 // On Win64, XMM6-XMM15 are callee-saved.
2547 __ movdqu(xmm6, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0));
2548 __ movdqu(xmm7, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1));
2549 __ movdqu(xmm8, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2));
2550 __ movdqu(xmm9, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3));
2551 __ movdqu(xmm10, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4));
2552 __ movdqu(xmm11, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5));
2553 __ movdqu(xmm12, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6));
2554 __ movdqu(xmm13, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7));
2555 __ movdqu(xmm14, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8));
2556 __ movdqu(xmm15, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9));
2557 __ addp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
2558#endif
2559
2560 __ popq(rbx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002561#ifdef _WIN64
2562 // Callee-saved in Win64 ABI, arguments/volatile in AMD64 ABI.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002563 __ popq(rsi);
2564 __ popq(rdi);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002565#endif
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002566 __ popq(r15);
2567 __ popq(r14);
2568 __ popq(r13);
2569 __ popq(r12);
2570 __ addp(rsp, Immediate(2 * kPointerSize)); // remove markers
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002571
2572 // Restore frame pointer and return.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002573 __ popq(rbp);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002574 __ ret(0);
2575}
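// Sketch of the call path set up by JSEntryStub::Generate above:
//   C++ caller -> this entry frame -> JSEntryTrampoline builtin -> JS code.
// The stub only builds the frame, saves callee-saved registers and links
// the handler chain; the trampoline builtin performs the actual invocation.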
2576
2577
2578void InstanceofStub::Generate(MacroAssembler* masm) {
2579 // Implements "value instanceof function" operator.
Steve Block44f0eee2011-05-26 01:26:41 +01002580 // Expected input state with no inline cache:
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002581 // rsp[0] : return address
2582 // rsp[8] : function pointer
2583 // rsp[16] : value
Steve Block44f0eee2011-05-26 01:26:41 +01002584 // Expected input state with an inline one-element cache:
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002585 // rsp[0] : return address
2586 // rsp[8] : offset from return address to location of inline cache
2587 // rsp[16] : function pointer
2588 // rsp[24] : value
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002589 // Returns a bitwise zero to indicate that the value
2590 // is an instance of the function, and anything else to
2591 // indicate that the value is not an instance.
2592
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002593 // Fixed register usage throughout the stub.
2594 Register object = rax; // Object (lhs).
2595 Register map = rbx; // Map of the object.
2596 Register function = rdx; // Function (rhs).
2597 Register prototype = rdi; // Prototype of the function.
2598 Register scratch = rcx;
2599
Ben Murdoch8b112d22011-06-08 16:22:53 +01002600 static const int kOffsetToMapCheckValue = 2;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002601 static const int kOffsetToResultValue = kPointerSize == kInt64Size ? 18 : 14;
Steve Block44f0eee2011-05-26 01:26:41 +01002602 // The last 4 bytes of the instruction sequence
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002603 // movp(rdi, FieldOperand(rax, HeapObject::kMapOffset))
2604 // Move(kScratchRegister, Factory::the_hole_value())
Steve Block44f0eee2011-05-26 01:26:41 +01002605 // in front of the hole value address.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002606 static const unsigned int kWordBeforeMapCheckValue =
2607 kPointerSize == kInt64Size ? 0xBA49FF78 : 0xBA41FF78;
Steve Block44f0eee2011-05-26 01:26:41 +01002608 // The last 4 bytes of the instruction sequence
2609 // __ j(not_equal, &cache_miss);
2610 // __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
2611 // before the offset of the hole value in the root array.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002612 static const unsigned int kWordBeforeResultValue =
2613 kPointerSize == kInt64Size ? 0x458B4906 : 0x458B4106;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002614
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002615 int extra_argument_offset = HasCallSiteInlineCheck() ? 1 : 0;
2616
2617 DCHECK_EQ(object.code(), InstanceofStub::left().code());
2618 DCHECK_EQ(function.code(), InstanceofStub::right().code());
2619
2620 // Get the object and function - they are always both needed.
2621 // Go slow case if the object is a smi.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002622 Label slow;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002623 StackArgumentsAccessor args(rsp, 2 + extra_argument_offset,
2624 ARGUMENTS_DONT_CONTAIN_RECEIVER);
2625 if (!HasArgsInRegisters()) {
2626 __ movp(object, args.GetArgumentOperand(0));
2627 __ movp(function, args.GetArgumentOperand(1));
2628 }
2629 __ JumpIfSmi(object, &slow);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002630
2631 // Check that the left hand is a JS object. Leave its map in rax.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002632 __ CmpObjectType(object, FIRST_SPEC_OBJECT_TYPE, map);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002633 __ j(below, &slow);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002634 __ CmpInstanceType(map, LAST_SPEC_OBJECT_TYPE);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002635 __ j(above, &slow);
2636
Steve Block44f0eee2011-05-26 01:26:41 +01002637 // If there is a call site cache, don't look in the global cache, but do the
2638 // real lookup and update the call site cache.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002639 if (!HasCallSiteInlineCheck() && !ReturnTrueFalseObject()) {
Steve Block44f0eee2011-05-26 01:26:41 +01002640 // Look up the function and the map in the instanceof cache.
Ben Murdoch257744e2011-11-30 15:57:28 +00002641 Label miss;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002642 __ CompareRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
Ben Murdoch257744e2011-11-30 15:57:28 +00002643 __ j(not_equal, &miss, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002644 __ CompareRoot(map, Heap::kInstanceofCacheMapRootIndex);
Ben Murdoch257744e2011-11-30 15:57:28 +00002645 __ j(not_equal, &miss, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01002646 __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002647 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
Steve Block44f0eee2011-05-26 01:26:41 +01002648 __ bind(&miss);
2649 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002650
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002651 // Get the prototype of the function.
2652 __ TryGetFunctionPrototype(function, prototype, &slow, true);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002653
2654 // Check that the function prototype is a JS object.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002655 __ JumpIfSmi(prototype, &slow);
2656 __ CmpObjectType(prototype, FIRST_SPEC_OBJECT_TYPE, kScratchRegister);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002657 __ j(below, &slow);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002658 __ CmpInstanceType(kScratchRegister, LAST_SPEC_OBJECT_TYPE);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002659 __ j(above, &slow);
2660
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002661 // Update the global instanceof or call site inlined cache with the current
2662 // map and function. The cached answer will be set when it is known below.
Steve Block44f0eee2011-05-26 01:26:41 +01002663 if (!HasCallSiteInlineCheck()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002664 __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
2665 __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
Steve Block44f0eee2011-05-26 01:26:41 +01002666 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002667 // The constants for the code patching are based on push instructions
2668 // at the call site.
2669 DCHECK(!HasArgsInRegisters());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002670 // Get return address and delta to inlined map check.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002671 __ movq(kScratchRegister, StackOperandForReturnAddress(0));
2672 __ subp(kScratchRegister, args.GetArgumentOperand(2));
Steve Block44f0eee2011-05-26 01:26:41 +01002673 if (FLAG_debug_code) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002674 __ movl(scratch, Immediate(kWordBeforeMapCheckValue));
2675 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), scratch);
2676 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCheck);
Steve Block44f0eee2011-05-26 01:26:41 +01002677 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002678 __ movp(kScratchRegister,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002679 Operand(kScratchRegister, kOffsetToMapCheckValue));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002680 __ movp(Operand(kScratchRegister, 0), map);
Steve Block44f0eee2011-05-26 01:26:41 +01002681 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002682
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002683 // Loop through the prototype chain looking for the function prototype.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002684 __ movp(scratch, FieldOperand(map, Map::kPrototypeOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00002685 Label loop, is_instance, is_not_instance;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002686 __ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex);
2687 __ bind(&loop);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002688 __ cmpp(scratch, prototype);
Ben Murdoch257744e2011-11-30 15:57:28 +00002689 __ j(equal, &is_instance, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002690 __ cmpp(scratch, kScratchRegister);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002691 // The code at is_not_instance assumes that kScratchRegister contains a
2692 // non-zero GCable value (the null object in this case).
Ben Murdoch257744e2011-11-30 15:57:28 +00002693 __ j(equal, &is_not_instance, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002694 __ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
2695 __ movp(scratch, FieldOperand(scratch, Map::kPrototypeOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002696 __ jmp(&loop);
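  // The loop above is a standard prototype-chain walk. A C++-flavored
  // sketch (hypothetical accessors, for illustration only):
  //   Object* p = object_map->prototype();
  //   while (p != null_value) {
  //     if (p == function_prototype) return true;    // &is_instance
  //     p = HeapObject::cast(p)->map()->prototype(); // follow the chain
  //   }
  //   return false;                                  // &is_not_instance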
2697
2698 __ bind(&is_instance);
Steve Block44f0eee2011-05-26 01:26:41 +01002699 if (!HasCallSiteInlineCheck()) {
2700 __ xorl(rax, rax);
2701 // Store bitwise zero in the cache. This is a Smi in GC terms.
2702 STATIC_ASSERT(kSmiTag == 0);
2703 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002704 if (ReturnTrueFalseObject()) {
2705 __ LoadRoot(rax, Heap::kTrueValueRootIndex);
2706 }
Steve Block44f0eee2011-05-26 01:26:41 +01002707 } else {
2708 // Store offset of true in the root array at the inline check site.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002709 int true_offset = 0x100 +
2710 (Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
2711 // Assert it is a 1-byte signed value.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002712 DCHECK(true_offset >= 0 && true_offset < 0x100);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002713 __ movl(rax, Immediate(true_offset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002714 __ movq(kScratchRegister, StackOperandForReturnAddress(0));
2715 __ subp(kScratchRegister, args.GetArgumentOperand(2));
Steve Block44f0eee2011-05-26 01:26:41 +01002716 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
2717 if (FLAG_debug_code) {
2718 __ movl(rax, Immediate(kWordBeforeResultValue));
2719 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002720 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
Steve Block44f0eee2011-05-26 01:26:41 +01002721 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002722 if (!ReturnTrueFalseObject()) {
2723 __ Set(rax, 0);
2724 }
Steve Block44f0eee2011-05-26 01:26:41 +01002725 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002726 __ ret(((HasArgsInRegisters() ? 0 : 2) + extra_argument_offset) *
2727 kPointerSize);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002728
2729 __ bind(&is_not_instance);
Steve Block44f0eee2011-05-26 01:26:41 +01002730 if (!HasCallSiteInlineCheck()) {
2731 // We have to store a non-zero value in the cache.
2732 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002733 if (ReturnTrueFalseObject()) {
2734 __ LoadRoot(rax, Heap::kFalseValueRootIndex);
2735 }
Steve Block44f0eee2011-05-26 01:26:41 +01002736 } else {
2737 // Store offset of false in the root array at the inline check site.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002738 int false_offset = 0x100 +
2739 (Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
2740 // Assert it is a 1-byte signed value.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002741 DCHECK(false_offset >= 0 && false_offset < 0x100);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002742 __ movl(rax, Immediate(false_offset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002743 __ movq(kScratchRegister, StackOperandForReturnAddress(0));
2744 __ subp(kScratchRegister, args.GetArgumentOperand(2));
Steve Block44f0eee2011-05-26 01:26:41 +01002745 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
2746 if (FLAG_debug_code) {
2747 __ movl(rax, Immediate(kWordBeforeResultValue));
2748 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002749 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
Steve Block44f0eee2011-05-26 01:26:41 +01002750 }
2751 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002752 __ ret(((HasArgsInRegisters() ? 0 : 2) + extra_argument_offset) *
2753 kPointerSize);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002754
2755 // Slow-case: Go through the JavaScript implementation.
2756 __ bind(&slow);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002757 if (!ReturnTrueFalseObject()) {
2758 // Tail call the builtin which returns 0 or 1.
2759 DCHECK(!HasArgsInRegisters());
2760 if (HasCallSiteInlineCheck()) {
2761 // Remove extra value from the stack.
2762 __ PopReturnAddressTo(rcx);
2763 __ Pop(rax);
2764 __ PushReturnAddressFrom(rcx);
2765 }
2766 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
2767 } else {
2768 // Call the builtin and convert 0/1 to true/false.
2769 {
2770 FrameScope scope(masm, StackFrame::INTERNAL);
2771 __ Push(object);
2772 __ Push(function);
2773 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
2774 }
2775 Label true_value, done;
2776 __ testq(rax, rax);
2777 __ j(zero, &true_value, Label::kNear);
2778 __ LoadRoot(rax, Heap::kFalseValueRootIndex);
2779 __ jmp(&done, Label::kNear);
2780 __ bind(&true_value);
2781 __ LoadRoot(rax, Heap::kTrueValueRootIndex);
2782 __ bind(&done);
2783 __ ret(((HasArgsInRegisters() ? 0 : 2) + extra_argument_offset) *
2784 kPointerSize);
Steve Block44f0eee2011-05-26 01:26:41 +01002785 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002786}
2787
2788
2789// -------------------------------------------------------------------------
2790// StringCharCodeAtGenerator
2791
2792void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002793 // If the receiver is a smi trigger the non-string case.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002794 if (check_mode_ == RECEIVER_IS_UNKNOWN) {
2795 __ JumpIfSmi(object_, receiver_not_string_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002796
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002797 // Fetch the instance type of the receiver into result register.
2798 __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset));
2799 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
2800 // If the receiver is not a string trigger the non-string case.
2801 __ testb(result_, Immediate(kIsNotStringMask));
2802 __ j(not_zero, receiver_not_string_);
2803 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002804
2805 // If the index is non-smi trigger the non-smi case.
2806 __ JumpIfNotSmi(index_, &index_not_smi_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002807 __ bind(&got_smi_index_);
2808
2809 // Check for index out of range.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002810 __ SmiCompare(index_, FieldOperand(object_, String::kLengthOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002811 __ j(above_equal, index_out_of_range_);
2812
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002813 __ SmiToInteger32(index_, index_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002814
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002815 StringCharLoadGenerator::Generate(
2816 masm, object_, index_, result_, &call_runtime_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002817
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002818 __ Integer32ToSmi(result_, result_);
2819 __ bind(&exit_);
2820}
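// Fast-path contract of GenerateFast above (sketch): the receiver must be a
// string (checked unless check_mode_ already guarantees it) and the index a
// smi within [0, length); every other case branches to the caller-provided
// labels and is completed by GenerateSlow below.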
2821
2822
2823void StringCharCodeAtGenerator::GenerateSlow(
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002824 MacroAssembler* masm,
2825 const RuntimeCallHelper& call_helper) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002826 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002827
Ben Murdoch257744e2011-11-30 15:57:28 +00002828 Factory* factory = masm->isolate()->factory();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002829 // Index is not a smi.
2830 __ bind(&index_not_smi_);
2831 // If index is a heap number, try converting it to an integer.
Ben Murdoch257744e2011-11-30 15:57:28 +00002832 __ CheckMap(index_,
2833 factory->heap_number_map(),
2834 index_not_number_,
2835 DONT_DO_SMI_CHECK);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002836 call_helper.BeforeCall(masm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002837 __ Push(object_);
2838 __ Push(index_); // Consumed by runtime conversion function.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002839 if (index_flags_ == STRING_INDEX_IS_NUMBER) {
2840 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
2841 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002842 DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002843 // NumberToSmi discards numbers that are not exact integers.
2844 __ CallRuntime(Runtime::kNumberToSmi, 1);
2845 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002846 if (!index_.is(rax)) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002847 // Save the conversion result before the pop instructions below
2848 // have a chance to overwrite it.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002849 __ movp(index_, rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002850 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002851 __ Pop(object_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002852 // Reload the instance type.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002853 __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002854 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
2855 call_helper.AfterCall(masm);
2856 // If index is still not a smi, it must be out of range.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002857 __ JumpIfNotSmi(index_, index_out_of_range_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002858 // Otherwise, return to the fast path.
2859 __ jmp(&got_smi_index_);
2860
2861 // Call runtime. We get here when the receiver is a string and the
2862 // index is a number, but the code of getting the actual character
2863 // is too complex (e.g., when the string needs to be flattened).
2864 __ bind(&call_runtime_);
2865 call_helper.BeforeCall(masm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002866 __ Push(object_);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002867 __ Integer32ToSmi(index_, index_);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002868 __ Push(index_);
2869 __ CallRuntime(Runtime::kStringCharCodeAtRT, 2);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002870 if (!result_.is(rax)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002871 __ movp(result_, rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002872 }
2873 call_helper.AfterCall(masm);
2874 __ jmp(&exit_);
2875
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002876 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002877}
2878
2879
2880// -------------------------------------------------------------------------
2881// StringCharFromCodeGenerator
2882
2883void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
2884 // Fast case of Heap::LookupSingleCharacterStringFromCode.
2885 __ JumpIfNotSmi(code_, &slow_case_);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002886 __ SmiCompare(code_, Smi::FromInt(String::kMaxOneByteCharCode));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002887 __ j(above, &slow_case_);
2888
2889 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
2890 SmiIndex index = masm->SmiToIndex(kScratchRegister, code_, kPointerSizeLog2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002891 __ movp(result_, FieldOperand(result_, index.reg, index.scale,
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002892 FixedArray::kHeaderSize));
2893 __ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
2894 __ j(equal, &slow_case_);
2895 __ bind(&exit_);
2896}
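// GenerateFast above is a table lookup: one-byte char codes index the
// single-character string cache; a cache miss (undefined entry) or a code
// above String::kMaxOneByteCharCode falls through to the slow case below.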
2897
2898
2899void StringCharFromCodeGenerator::GenerateSlow(
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002900 MacroAssembler* masm,
2901 const RuntimeCallHelper& call_helper) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002902 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002903
2904 __ bind(&slow_case_);
2905 call_helper.BeforeCall(masm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002906 __ Push(code_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002907 __ CallRuntime(Runtime::kCharFromCode, 1);
2908 if (!result_.is(rax)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002909 __ movp(result_, rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002910 }
2911 call_helper.AfterCall(masm);
2912 __ jmp(&exit_);
2913
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002914 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002915}
2916
2917
2918void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
2919 Register dest,
2920 Register src,
2921 Register count,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002922 String::Encoding encoding) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002923 // Nothing to do for zero characters.
Ben Murdoch257744e2011-11-30 15:57:28 +00002924 Label done;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002925 __ testl(count, count);
Ben Murdoch257744e2011-11-30 15:57:28 +00002926 __ j(zero, &done, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002927
2928 // Make count the number of bytes to copy.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002929 if (encoding == String::TWO_BYTE_ENCODING) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002930 STATIC_ASSERT(2 == sizeof(uc16));
2931 __ addl(count, count);
2932 }
2933
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002934 // Copy remaining characters.
2935 Label loop;
2936 __ bind(&loop);
2937 __ movb(kScratchRegister, Operand(src, 0));
2938 __ movb(Operand(dest, 0), kScratchRegister);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002939 __ incp(src);
2940 __ incp(dest);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002941 __ decl(count);
2942 __ j(not_zero, &loop);
2943
2944 __ bind(&done);
2945}
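// GenerateCopyCharacters above emits a simple byte-wise copy; count is
// pre-scaled to bytes for two-byte strings. Equivalent C sketch:
//   void CopyBytes(uint8_t* dest, const uint8_t* src, int count) {
//     while (count-- > 0) *dest++ = *src++;
//   }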
2946
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002947
2948void SubStringStub::Generate(MacroAssembler* masm) {
2949 Label runtime;
2950
2951 // Stack frame on entry.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002952 // rsp[0] : return address
2953 // rsp[8] : to
2954 // rsp[16] : from
2955 // rsp[24] : string
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002956
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002957 enum SubStringStubArgumentIndices {
2958 STRING_ARGUMENT_INDEX,
2959 FROM_ARGUMENT_INDEX,
2960 TO_ARGUMENT_INDEX,
2961 SUB_STRING_ARGUMENT_COUNT
2962 };
2963
2964 StackArgumentsAccessor args(rsp, SUB_STRING_ARGUMENT_COUNT,
2965 ARGUMENTS_DONT_CONTAIN_RECEIVER);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002966
2967 // Make sure first argument is a string.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002968 __ movp(rax, args.GetArgumentOperand(STRING_ARGUMENT_INDEX));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002969 STATIC_ASSERT(kSmiTag == 0);
2970 __ testl(rax, Immediate(kSmiTagMask));
2971 __ j(zero, &runtime);
2972 Condition is_string = masm->IsObjectStringType(rax, rbx, rbx);
2973 __ j(NegateCondition(is_string), &runtime);
2974
2975 // rax: string
2976 // rbx: instance type
2977 // Calculate length of sub string using the smi values.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002978 __ movp(rcx, args.GetArgumentOperand(TO_ARGUMENT_INDEX));
2979 __ movp(rdx, args.GetArgumentOperand(FROM_ARGUMENT_INDEX));
Ben Murdochf87a2032010-10-22 12:50:53 +01002980 __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002981
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002982 __ SmiSub(rcx, rcx, rdx); // Overflow doesn't happen.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002983 __ cmpp(rcx, FieldOperand(rax, String::kLengthOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002984 Label not_original_string;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002985 // Shorter than original string's length: an actual substring.
2986 __ j(below, &not_original_string, Label::kNear);
2987 // Longer than original string's length or negative: unsafe arguments.
2988 __ j(above, &runtime);
2989 // Return original string.
2990 Counters* counters = isolate()->counters();
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002991 __ IncrementCounter(counters->sub_string_native(), 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002992 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002993 __ bind(&not_original_string);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002994
2995 Label single_char;
2996 __ SmiCompare(rcx, Smi::FromInt(1));
2997 __ j(equal, &single_char);
2998
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002999 __ SmiToInteger32(rcx, rcx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003000
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003001 // rax: string
3002 // rbx: instance type
3003 // rcx: sub string length
3004 // rdx: from index (smi)
3005 // Deal with different string types: update the index if necessary
3006 // and put the underlying string into rdi.
3007 Label underlying_unpacked, sliced_string, seq_or_external_string;
3008 // If the string is not indirect, it can only be sequential or external.
3009 STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
3010 STATIC_ASSERT(kIsIndirectStringMask != 0);
3011 __ testb(rbx, Immediate(kIsIndirectStringMask));
3012 __ j(zero, &seq_or_external_string, Label::kNear);
3013
3014 __ testb(rbx, Immediate(kSlicedNotConsMask));
3015 __ j(not_zero, &sliced_string, Label::kNear);
3016 // Cons string. Check whether it is flat, then fetch first part.
3017 // Flat cons strings have an empty second part.
3018 __ CompareRoot(FieldOperand(rax, ConsString::kSecondOffset),
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003019 Heap::kempty_stringRootIndex);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003020 __ j(not_equal, &runtime);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003021 __ movp(rdi, FieldOperand(rax, ConsString::kFirstOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003022 // Update instance type.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003023 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003024 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003025 __ jmp(&underlying_unpacked, Label::kNear);
3026
3027 __ bind(&sliced_string);
3028 // Sliced string. Fetch parent and correct start index by offset.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003029 __ addp(rdx, FieldOperand(rax, SlicedString::kOffsetOffset));
3030 __ movp(rdi, FieldOperand(rax, SlicedString::kParentOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003031 // Update instance type.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003032 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003033 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
3034 __ jmp(&underlying_unpacked, Label::kNear);
3035
3036 __ bind(&seq_or_external_string);
3037 // Sequential or external string. Just move string to the correct register.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003038 __ movp(rdi, rax);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003039
3040 __ bind(&underlying_unpacked);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003041
Ben Murdoch589d6972011-11-30 16:04:58 +00003042 if (FLAG_string_slices) {
3043 Label copy_routine;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003044 // rdi: underlying subject string
3045 // rbx: instance type of underlying subject string
3046 // rdx: adjusted start index (smi)
3047 // rcx: length
Ben Murdoch589d6972011-11-30 16:04:58 +00003048 // If coming from the make_two_character_string path, the string
3049 // is too short to be sliced anyways.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003050 __ cmpp(rcx, Immediate(SlicedString::kMinLength));
Ben Murdoch589d6972011-11-30 16:04:58 +00003051 // Short slice. Copy instead of slicing.
3052 __ j(less, &copy_routine);
Ben Murdoch589d6972011-11-30 16:04:58 +00003053 // Allocate new sliced string. At this point we do not reload the instance
3054 // type including the string encoding because we simply rely on the info
3055 // provided by the original string. It does not matter if the original
3056 // string's encoding is wrong because we always have to recheck encoding of
3057 // the newly created string's parent anyway, due to externalized strings.
3058 Label two_byte_slice, set_slice_header;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003059 STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
Ben Murdoch589d6972011-11-30 16:04:58 +00003060 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
3061 __ testb(rbx, Immediate(kStringEncodingMask));
3062 __ j(zero, &two_byte_slice, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003063 __ AllocateOneByteSlicedString(rax, rbx, r14, &runtime);
Ben Murdoch589d6972011-11-30 16:04:58 +00003064 __ jmp(&set_slice_header, Label::kNear);
3065 __ bind(&two_byte_slice);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003066 __ AllocateTwoByteSlicedString(rax, rbx, r14, &runtime);
Ben Murdoch589d6972011-11-30 16:04:58 +00003067 __ bind(&set_slice_header);
Ben Murdoch589d6972011-11-30 16:04:58 +00003068 __ Integer32ToSmi(rcx, rcx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003069 __ movp(FieldOperand(rax, SlicedString::kLengthOffset), rcx);
3070 __ movp(FieldOperand(rax, SlicedString::kHashFieldOffset),
Ben Murdoch589d6972011-11-30 16:04:58 +00003071 Immediate(String::kEmptyHashField));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003072 __ movp(FieldOperand(rax, SlicedString::kParentOffset), rdi);
3073 __ movp(FieldOperand(rax, SlicedString::kOffsetOffset), rdx);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003074 __ IncrementCounter(counters->sub_string_native(), 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003075 __ ret(3 * kPointerSize);
Ben Murdoch589d6972011-11-30 16:04:58 +00003076
3077 __ bind(&copy_routine);
Ben Murdoch589d6972011-11-30 16:04:58 +00003078 }
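  // A sliced string is a (parent, offset, length) view over the underlying
  // string: the header fields written above replace an actual character
  // copy. Slices shorter than SlicedString::kMinLength are copied instead,
  // since the extra indirection would not pay for itself.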
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003079
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003080 // rdi: underlying subject string
3081 // rbx: instance type of underlying subject string
3082 // rdx: adjusted start index (smi)
3083 // rcx: length
3084 // The subject string can only be an external or sequential string of either
3085 // encoding at this point.
3086 Label two_byte_sequential, sequential_string;
3087 STATIC_ASSERT(kExternalStringTag != 0);
3088 STATIC_ASSERT(kSeqStringTag == 0);
3089 __ testb(rbx, Immediate(kExternalStringTag));
3090 __ j(zero, &sequential_string);
3091
3092 // Handle external string.
3093 // Rule out short external strings.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003094 STATIC_ASSERT(kShortExternalStringTag != 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003095 __ testb(rbx, Immediate(kShortExternalStringMask));
3096 __ j(not_zero, &runtime);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003097 __ movp(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003098 // Move the pointer so that offset-wise, it looks like a sequential string.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003099 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
3100 __ subp(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003101
3102 __ bind(&sequential_string);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003103 STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003104 __ testb(rbx, Immediate(kStringEncodingMask));
3105 __ j(zero, &two_byte_sequential);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003106
3107 // Allocate the result.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003108 __ AllocateOneByteString(rax, rcx, r11, r14, r15, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003109
3110 // rax: result string
3111 // rcx: result string length
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003112 { // Locate character of sub string start.
3113 SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003114 __ leap(r14, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
3115 SeqOneByteString::kHeaderSize - kHeapObjectTag));
Ben Murdoch85b71792012-04-11 18:30:58 +01003116 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003117 // Locate first character of result.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003118 __ leap(rdi, FieldOperand(rax, SeqOneByteString::kHeaderSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003119
3120 // rax: result string
3121 // rcx: result length
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003122 // rdi: first character of result
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003123 // r14: character of sub string start
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003124 StringHelper::GenerateCopyCharacters(
3125 masm, rdi, r14, rcx, String::ONE_BYTE_ENCODING);
Steve Block44f0eee2011-05-26 01:26:41 +01003126 __ IncrementCounter(counters->sub_string_native(), 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003127 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003128
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003129 __ bind(&two_byte_sequential);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003130 // Allocate the result.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003131 __ AllocateTwoByteString(rax, rcx, r11, r14, r15, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003132
3133 // rax: result string
3134 // rcx: result string length
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003135 { // Locate character of sub string start.
3136 SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003137 __ leap(r14, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
3138 SeqOneByteString::kHeaderSize - kHeapObjectTag));
Ben Murdoch85b71792012-04-11 18:30:58 +01003139 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003140 // Locate first character of result.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003141 __ leap(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003142
3143 // rax: result string
3144 // rcx: result length
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003145 // rdi: first character of result
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003146 // r14: character of sub string start
3147 StringHelper::GenerateCopyCharacters(
3148 masm, rdi, r14, rcx, String::TWO_BYTE_ENCODING);
Steve Block44f0eee2011-05-26 01:26:41 +01003149 __ IncrementCounter(counters->sub_string_native(), 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003150 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003151
3152 // Just jump to runtime to create the sub string.
3153 __ bind(&runtime);
3154 __ TailCallRuntime(Runtime::kSubString, 3, 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003155
3156 __ bind(&single_char);
3157 // rax: string
3158 // rbx: instance type
3159 // rcx: sub string length (smi)
3160 // rdx: from index (smi)
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003161 StringCharAtGenerator generator(rax, rdx, rcx, rax, &runtime, &runtime,
3162 &runtime, STRING_INDEX_IS_NUMBER,
3163 RECEIVER_IS_STRING);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003164 generator.GenerateFast(masm);
3165 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
3166 generator.SkipSlow(masm, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003167}
3168
3169
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003170void ToNumberStub::Generate(MacroAssembler* masm) {
3171 // The ToNumber stub takes one argument in rax.
3172 Label not_smi;
3173 __ JumpIfNotSmi(rax, &not_smi, Label::kNear);
3174 __ Ret();
3175 __ bind(&not_smi);
3176
3177 Label not_heap_number;
3178 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
3179 Heap::kHeapNumberMapRootIndex);
3180 __ j(not_equal, &not_heap_number, Label::kNear);
3181 __ Ret();
3182 __ bind(&not_heap_number);
3183
3184 Label not_string, slow_string;
3185 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdi);
3186 // rax: object
3187 // rdi: object map
3188 __ j(above_equal, &not_string, Label::kNear);
3189 // Check if string has a cached array index.
3190 __ testl(FieldOperand(rax, String::kHashFieldOffset),
3191 Immediate(String::kContainsCachedArrayIndexMask));
3192 __ j(not_zero, &slow_string, Label::kNear);
3193 __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
3194 __ IndexFromHash(rax, rax);
3195 __ Ret();
3196 __ bind(&slow_string);
3197 __ PopReturnAddressTo(rcx); // Pop return address.
3198 __ Push(rax); // Push argument.
3199 __ PushReturnAddressFrom(rcx); // Push return address.
3200 __ TailCallRuntime(Runtime::kStringToNumber, 1, 1);
3201 __ bind(&not_string);
3202
3203 Label not_oddball;
3204 __ CmpInstanceType(rdi, ODDBALL_TYPE);
3205 __ j(not_equal, &not_oddball, Label::kNear);
3206 __ movp(rax, FieldOperand(rax, Oddball::kToNumberOffset));
3207 __ Ret();
3208 __ bind(&not_oddball);
3209
3210 __ PopReturnAddressTo(rcx); // Pop return address.
3211 __ Push(rax); // Push argument.
3212 __ PushReturnAddressFrom(rcx); // Push return address.
3213 __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION);
3214}
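// Dispatch order implemented by ToNumberStub::Generate above (sketch):
//   smi         -> returned unchanged
//   heap number -> returned unchanged
//   string      -> cached array index if present, else
//                  Runtime::kStringToNumber
//   oddball     -> its precomputed to-number value
//   other       -> Builtins::TO_NUMBER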
3215
3216
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003217void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
3218 Register left,
3219 Register right,
3220 Register scratch1,
3221 Register scratch2) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003222 Register length = scratch1;
3223
3224 // Compare lengths.
3225 Label check_zero_length;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003226 __ movp(length, FieldOperand(left, String::kLengthOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00003227 __ SmiCompare(length, FieldOperand(right, String::kLengthOffset));
3228 __ j(equal, &check_zero_length, Label::kNear);
3229 __ Move(rax, Smi::FromInt(NOT_EQUAL));
3230 __ ret(0);
3231
3232 // Check if the length is zero.
3233 Label compare_chars;
3234 __ bind(&check_zero_length);
3235 STATIC_ASSERT(kSmiTag == 0);
3236 __ SmiTest(length);
3237 __ j(not_zero, &compare_chars, Label::kNear);
3238 __ Move(rax, Smi::FromInt(EQUAL));
3239 __ ret(0);
3240
3241 // Compare characters.
3242 __ bind(&compare_chars);
3243 Label strings_not_equal;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003244 GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2,
3245 &strings_not_equal, Label::kNear);
Ben Murdoch257744e2011-11-30 15:57:28 +00003246
3247 // Characters are equal.
3248 __ Move(rax, Smi::FromInt(EQUAL));
3249 __ ret(0);
3250
3251 // Characters are not equal.
3252 __ bind(&strings_not_equal);
3253 __ Move(rax, Smi::FromInt(NOT_EQUAL));
3254 __ ret(0);
3255}
3256
3257
void StringHelper::GenerateCompareFlatOneByteStrings(
    MacroAssembler* masm, Register left, Register right, Register scratch1,
    Register scratch2, Register scratch3, Register scratch4) {
  // Ensure that you can always subtract a string length from a non-negative
  // number (e.g. another length).
  STATIC_ASSERT(String::kMaxLength < 0x7fffffff);

  // Find minimum length and length difference.
  __ movp(scratch1, FieldOperand(left, String::kLengthOffset));
  __ movp(scratch4, scratch1);
  __ SmiSub(scratch4,
            scratch4,
            FieldOperand(right, String::kLengthOffset));
  // Register scratch4 now holds left.length - right.length.
  const Register length_difference = scratch4;
  Label left_shorter;
  __ j(less, &left_shorter, Label::kNear);
  // The right string isn't longer than the left one.
  // Get the right string's length by subtracting the (non-negative) difference
  // from the left string's length.
  __ SmiSub(scratch1, scratch1, length_difference);
  __ bind(&left_shorter);
  // Register scratch1 now holds Min(left.length, right.length).
  const Register min_length = scratch1;

  Label compare_lengths;
  // If min-length is zero, go directly to comparing lengths.
  __ SmiTest(min_length);
  __ j(zero, &compare_lengths, Label::kNear);

  // Compare loop.
  Label result_not_equal;
  GenerateOneByteCharsCompareLoop(
      masm, left, right, min_length, scratch2, &result_not_equal,
      // In debug-code mode, SmiTest below might push
      // the target label outside the near range.
      Label::kFar);

  // Completed loop without finding different characters.
  // Compare lengths (precomputed).
  __ bind(&compare_lengths);
  __ SmiTest(length_difference);
  Label length_not_equal;
  __ j(not_zero, &length_not_equal, Label::kNear);

  // Result is EQUAL.
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  Label result_greater;
  Label result_less;
  __ bind(&length_not_equal);
  __ j(greater, &result_greater, Label::kNear);
  __ jmp(&result_less, Label::kNear);
  __ bind(&result_not_equal);
  // Unequal comparison of left to right, either character or length.
  __ j(above, &result_greater, Label::kNear);
  __ bind(&result_less);

  // Result is LESS.
  __ Move(rax, Smi::FromInt(LESS));
  __ ret(0);

  // Result is GREATER.
  __ bind(&result_greater);
  __ Move(rax, Smi::FromInt(GREATER));
  __ ret(0);
}

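// The loop below uses a common assembler idiom: instead of comparing an
// increasing index against the length on every iteration, both string
// pointers are advanced one-past-the-end and the index runs from -length up
// to 0, so the loop condition falls out of incq's zero flag for free.
// Sketched in C (illustrative only):
//
//   const uint8_t* end_a = a + len;  // one past the last byte
//   const uint8_t* end_b = b + len;
//   for (intptr_t i = -len; i != 0; i++) {
//     if (end_a[i] != end_b[i]) goto chars_not_equal;
//   }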
void StringHelper::GenerateOneByteCharsCompareLoop(
    MacroAssembler* masm, Register left, Register right, Register length,
    Register scratch, Label* chars_not_equal, Label::Distance near_jump) {
  // Change index to run from -length to -1 by adding length to string
  // start. This means that loop ends when index reaches zero, which
  // doesn't need an additional compare.
  __ SmiToInteger32(length, length);
  __ leap(left,
          FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
  __ leap(right,
          FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
  __ negq(length);
  Register index = length;  // index = -length;

  // Compare loop.
  Label loop;
  __ bind(&loop);
  __ movb(scratch, Operand(left, index, times_1, 0));
  __ cmpb(scratch, Operand(right, index, times_1, 0));
  __ j(not_equal, chars_not_equal, near_jump);
  __ incq(index);
  __ j(not_zero, &loop);
}


void StringCompareStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  //  rsp[0]  : return address
  //  rsp[8]  : right string
  //  rsp[16] : left string

  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rdx, args.GetArgumentOperand(0));  // left
  __ movp(rax, args.GetArgumentOperand(1));  // right

  // Check for identity.
  Label not_same;
  __ cmpp(rdx, rax);
  __ j(not_equal, &not_same, Label::kNear);
  __ Move(rax, Smi::FromInt(EQUAL));
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->string_compare_native(), 1);
  __ ret(2 * kPointerSize);

  __ bind(&not_same);

  // Check that both are sequential one-byte strings.
  __ JumpIfNotBothSequentialOneByteStrings(rdx, rax, rcx, rbx, &runtime);

  // Inline comparison of one-byte strings.
  __ IncrementCounter(counters->string_compare_native(), 1);
  // Drop arguments from the stack.
  __ PopReturnAddressTo(rcx);
  __ addp(rsp, Immediate(2 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
  StringHelper::GenerateCompareFlatOneByteStrings(masm, rdx, rax, rcx, rbx, rdi,
                                                  r8);

  // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
}


void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdx    : left
  //  -- rax    : right
  //  -- rsp[0] : return address
  // -----------------------------------

  // Load rcx with the allocation site. We stick an undefined dummy value here
  // and replace it with the real allocation site later when we instantiate
  // this stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
  __ Move(rcx, handle(isolate()->heap()->undefined_value()));

  // Make sure that we actually patched the allocation site.
  if (FLAG_debug_code) {
    __ testb(rcx, Immediate(kSmiTagMask));
    __ Assert(not_equal, kExpectedAllocationSite);
    __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset),
           isolate()->factory()->allocation_site_map());
    __ Assert(equal, kExpectedAllocationSite);
  }

  // Tail call into the stub that handles binary operations with allocation
  // sites.
  BinaryOpWithAllocationSiteStub stub(isolate(), state());
  __ TailCallStub(&stub);
}

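// Background for GenerateSmis below: tagged smis preserve ordering under
// raw word comparison, so left - right is negative/zero/positive exactly
// when left </==/> right, except when the subtraction overflows and the
// sign bit comes out flipped. notp() flips every bit, which restores the
// correct sign; callers only inspect the sign and zero-ness of rax, so the
// changed magnitude is harmless. A rough sketch (illustrative only):
//
//   intptr_t diff = left - right;
//   if (signed_overflow) diff = ~diff;  // restore the correct sign bit
//   return diff;  // < 0, == 0 or > 0 decides the comparison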
void CompareICStub::GenerateSmis(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::SMI);
  Label miss;
  __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear);

  if (GetCondition() == equal) {
    // For equality we do not care about the sign of the result.
    __ subp(rax, rdx);
  } else {
    Label done;
    __ subp(rdx, rax);
    __ j(no_overflow, &done, Label::kNear);
    // Correct sign of result in case of overflow.
    __ notp(rdx);
    __ bind(&done);
    __ movp(rax, rdx);
  }
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}

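// Notes on the double path below: ucomisd sets the parity flag when either
// operand is NaN, which is why the code bails to the generic stub on
// parity_even; an unordered compare must not produce LESS/EQUAL/GREATER.
// The branch-free -1/0/+1 result uses setcc/sbb: after ucomisd, "above"
// means left > right and the carry flag means left < right, so roughly
// (illustrative only):
//
//   int result = 0;
//   if (left > right) result = 1;   // setcc(above, rax)
//   if (left < right) result -= 1;  // sbb subtracts the carry flag
//   return result;                  // -1, 0 or +1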
void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::NUMBER);

  Label generic_stub;
  Label unordered, maybe_undefined1, maybe_undefined2;
  Label miss;

  if (left() == CompareICState::SMI) {
    __ JumpIfNotSmi(rdx, &miss);
  }
  if (right() == CompareICState::SMI) {
    __ JumpIfNotSmi(rax, &miss);
  }

  // Load left and right operand.
  Label done, left, left_smi, right_smi;
  __ JumpIfSmi(rax, &right_smi, Label::kNear);
  __ CompareMap(rax, isolate()->factory()->heap_number_map());
  __ j(not_equal, &maybe_undefined1, Label::kNear);
  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&left, Label::kNear);
  __ bind(&right_smi);
  __ SmiToInteger32(rcx, rax);  // Can't clobber rax yet.
  __ Cvtlsi2sd(xmm1, rcx);

  __ bind(&left);
  __ JumpIfSmi(rdx, &left_smi, Label::kNear);
  __ CompareMap(rdx, isolate()->factory()->heap_number_map());
  __ j(not_equal, &maybe_undefined2, Label::kNear);
  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  __ jmp(&done);
  __ bind(&left_smi);
  __ SmiToInteger32(rcx, rdx);  // Can't clobber rdx yet.
  __ Cvtlsi2sd(xmm0, rcx);

  __ bind(&done);
  // Compare operands.
  __ ucomisd(xmm0, xmm1);

  // Don't base result on EFLAGS when a NaN is involved.
  __ j(parity_even, &unordered, Label::kNear);

  // Return a result of -1, 0, or 1, based on EFLAGS.
  // Performing mov, because xor would destroy the flag register.
  __ movl(rax, Immediate(0));
  __ movl(rcx, Immediate(0));
  __ setcc(above, rax);  // Add one to zero if carry clear and not equal.
  __ sbbp(rax, rcx);     // Subtract one if below (aka. carry set).
  __ ret(0);

  __ bind(&unordered);
  __ bind(&generic_stub);
  CompareICStub stub(isolate(), op(), CompareICState::GENERIC,
                     CompareICState::GENERIC, CompareICState::GENERIC);
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);

  __ bind(&maybe_undefined1);
  if (Token::IsOrderedRelationalCompareOp(op())) {
    __ Cmp(rax, isolate()->factory()->undefined_value());
    __ j(not_equal, &miss);
    __ JumpIfSmi(rdx, &unordered);
    __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
    __ j(not_equal, &maybe_undefined2, Label::kNear);
    __ jmp(&unordered);
  }

  __ bind(&maybe_undefined2);
  if (Token::IsOrderedRelationalCompareOp(op())) {
    __ Cmp(rdx, isolate()->factory()->undefined_value());
    __ j(equal, &unordered);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}

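// Internalized strings are unique: the runtime keeps at most one
// internalized string per character sequence, so the two handlers below can
// compare by pointer identity alone and never touch characters. The result
// encoding also leans on rax itself: when the pointers differ, rax (the
// right operand, a tagged heap pointer) is already a non-zero "not equal"
// value, and only the equal case needs an explicit Smi::FromInt(EQUAL).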
void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::INTERNALIZED_STRING);
  DCHECK(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;

  // Check that both operands are heap objects.
  Label miss;
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss, Label::kNear);

  // Check that both operands are internalized strings.
  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ orp(tmp1, tmp2);
  __ testb(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, &miss, Label::kNear);

  // Internalized strings are compared by identity.
  Label done;
  __ cmpp(left, right);
  // Make sure rax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  DCHECK(right.is(rax));
  __ j(not_equal, &done, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::UNIQUE_NAME);
  DCHECK(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;

  // Check that both operands are heap objects.
  Label miss;
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss, Label::kNear);

  // Check that both operands are unique names. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));

  __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear);
  __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear);

  // Unique names are compared by identity.
  Label done;
  __ cmpp(left, right);
  // Make sure rax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  DCHECK(right.is(rax));
  __ j(not_equal, &done, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}

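// The general string handler below layers its fast paths: identity first,
// then (for equality only) an internalized-vs-internalized early out, then
// an inline flat one-byte comparison, and only then the runtime. The
// ordering matters: each test is cheaper than the next and filters out the
// common cases before the expensive ones run.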
void CompareICStub::GenerateStrings(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::STRING);
  Label miss;

  bool equality = Token::IsEqualityOp(op());

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;
  Register tmp3 = rdi;

  // Check that both operands are heap objects.
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss);

  // Check that both operands are strings. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  __ movp(tmp3, tmp1);
  STATIC_ASSERT(kNotStringTag != 0);
  __ orp(tmp3, tmp2);
  __ testb(tmp3, Immediate(kIsNotStringMask));
  __ j(not_zero, &miss);

  // Fast check for identical strings.
  Label not_same;
  __ cmpp(left, right);
  __ j(not_equal, &not_same, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  // Handle not identical strings.
  __ bind(&not_same);

  // Check that both strings are internalized strings. If they are, we're done
  // because we already know they are not identical. We also know they are both
  // strings.
  if (equality) {
    Label do_compare;
    STATIC_ASSERT(kInternalizedTag == 0);
    __ orp(tmp1, tmp2);
    __ testb(tmp1, Immediate(kIsNotInternalizedMask));
    __ j(not_zero, &do_compare, Label::kNear);
    // Make sure rax is non-zero. At this point input operands are
    // guaranteed to be non-zero.
    DCHECK(right.is(rax));
    __ ret(0);
    __ bind(&do_compare);
  }

  // Check that both strings are sequential one-byte.
  Label runtime;
  __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime);

  // Compare flat one-byte strings. Returns when done.
  if (equality) {
    StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1,
                                                  tmp2);
  } else {
    StringHelper::GenerateCompareFlatOneByteStrings(
        masm, left, right, tmp1, tmp2, tmp3, kScratchRegister);
  }

  // Handle more complex cases in runtime.
  __ bind(&runtime);
  __ PopReturnAddressTo(tmp1);
  __ Push(left);
  __ Push(right);
  __ PushReturnAddressFrom(tmp1);
  if (equality) {
    __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
  } else {
    __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateObjects(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::OBJECT);
  Label miss;
  Condition either_smi = masm->CheckEitherSmi(rdx, rax);
  __ j(either_smi, &miss, Label::kNear);

  __ CmpObjectType(rax, JS_OBJECT_TYPE, rcx);
  __ j(not_equal, &miss, Label::kNear);
  __ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx);
  __ j(not_equal, &miss, Label::kNear);

  DCHECK(GetCondition() == equal);
  __ subp(rax, rdx);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateKnownObjects(MacroAssembler* masm) {
  Label miss;
  Condition either_smi = masm->CheckEitherSmi(rdx, rax);
  __ j(either_smi, &miss, Label::kNear);

  __ movp(rcx, FieldOperand(rax, HeapObject::kMapOffset));
  __ movp(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
  __ Cmp(rcx, known_map_);
  __ j(not_equal, &miss, Label::kNear);
  __ Cmp(rbx, known_map_);
  __ j(not_equal, &miss, Label::kNear);

  __ subp(rax, rdx);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateMiss(MacroAssembler* masm) {
  {
    // Call the runtime system in a fresh internal frame.
    ExternalReference miss =
        ExternalReference(IC_Utility(IC::kCompareIC_Miss), isolate());

    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rdx);
    __ Push(rax);
    __ Push(rdx);
    __ Push(rax);
    __ Push(Smi::FromInt(op()));
    __ CallExternalReference(miss, 3);

    // Compute the entry point of the rewritten stub.
    __ leap(rdi, FieldOperand(rax, Code::kHeaderSize));
    __ Pop(rax);
    __ Pop(rdx);
  }

  // Do a tail call to the rewritten stub.
  __ jmp(rdi);
}

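// Both lookup flavors below share the probing scheme for V8's
// power-of-two-sized hash tables: probe i inspects slot
// (hash + GetProbeOffset(i)) & (capacity - 1), a quadratic probe sequence.
// Each entry is a (key, value, details) triple, hence the "index *= 3"
// scaling with NameDictionary::kEntrySize == 3, done with a single lea.
// Roughly (illustrative only):
//
//   int slot = (hash + GetProbeOffset(i)) & (capacity - 1);
//   int entry_offset = slot * 3;  // slots are 3-pointer entries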
void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register properties,
                                                      Handle<Name> name,
                                                      Register r0) {
  DCHECK(name->IsUniqueName());
  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the hole value).
  for (int i = 0; i < kInlinedProbes; i++) {
    // r0 points to properties hash.
    // Compute the masked index: (hash + i + i * i) & mask.
    Register index = r0;
    // Capacity is smi 2^n.
    __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset));
    __ decl(index);
    __ andp(index,
            Immediate(name->Hash() + NameDictionary::GetProbeOffset(i)));

    // Scale the index by multiplying by the entry size.
    DCHECK(NameDictionary::kEntrySize == 3);
    __ leap(index, Operand(index, index, times_2, 0));  // index *= 3.

    Register entity_name = r0;
    // Having undefined at this place means the name is not contained.
    DCHECK_EQ(kSmiTagSize, 1);
    __ movp(entity_name, Operand(properties,
                                 index,
                                 times_pointer_size,
                                 kElementsStartOffset - kHeapObjectTag));
    __ Cmp(entity_name, masm->isolate()->factory()->undefined_value());
    __ j(equal, done);

    // Stop if found the property.
    __ Cmp(entity_name, Handle<Name>(name));
    __ j(equal, miss);

    Label good;
    // Check for the hole and skip.
    __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex);
    __ j(equal, &good, Label::kNear);

    // Check if the entry name is not a unique name.
    __ movp(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
    __ JumpIfNotUniqueNameInstanceType(
        FieldOperand(entity_name, Map::kInstanceTypeOffset), miss);
    __ bind(&good);
  }

  NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
                                NEGATIVE_LOOKUP);
  __ Push(Handle<Object>(name));
  __ Push(Immediate(name->Hash()));
  __ CallStub(&stub);
  __ testp(r0, r0);
  __ j(not_zero, miss);
  __ jmp(done);
}


// Probe the name dictionary in the |elements| register. Jump to the
// |done| label if a property with the given name is found leaving the
// index into the dictionary in |r1|. Jump to the |miss| label
// otherwise.
void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register elements,
                                                      Register name,
                                                      Register r0,
                                                      Register r1) {
  DCHECK(!elements.is(r0));
  DCHECK(!elements.is(r1));
  DCHECK(!name.is(r0));
  DCHECK(!name.is(r1));

  __ AssertName(name);

  __ SmiToInteger32(r0, FieldOperand(elements, kCapacityOffset));
  __ decl(r0);

  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ movl(r1, FieldOperand(name, Name::kHashFieldOffset));
    __ shrl(r1, Immediate(Name::kHashShift));
    if (i > 0) {
      __ addl(r1, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ andp(r1, r0);

    // Scale the index by multiplying by the entry size.
    DCHECK(NameDictionary::kEntrySize == 3);
    __ leap(r1, Operand(r1, r1, times_2, 0));  // r1 = r1 * 3

    // Check if the key is identical to the name.
    __ cmpp(name, Operand(elements, r1, times_pointer_size,
                          kElementsStartOffset - kHeapObjectTag));
    __ j(equal, done);
  }

  NameDictionaryLookupStub stub(masm->isolate(), elements, r0, r1,
                                POSITIVE_LOOKUP);
  __ Push(name);
  __ movl(r0, FieldOperand(name, Name::kHashFieldOffset));
  __ shrl(r0, Immediate(Name::kHashShift));
  __ Push(r0);
  __ CallStub(&stub);

  __ testp(r0, r0);
  __ j(zero, miss);
  __ jmp(done);
}


void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
  // This stub overrides SometimesSetsUpAFrame() to return false. That means
  // we cannot call anything that could cause a GC from this stub.
  // Stack frame on entry:
  //  rsp[0 * kPointerSize] : return address.
  //  rsp[1 * kPointerSize] : key's hash.
  //  rsp[2 * kPointerSize] : key.
  // Registers:
  //  dictionary_: NameDictionary to probe.
  //  result_: used as scratch.
  //  index_: will hold an index of entry if lookup is successful.
  //          might alias with result_.
  // Returns:
  //  result_ is zero if lookup failed, non-zero otherwise.

  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;

  Register scratch = result();

  __ SmiToInteger32(scratch, FieldOperand(dictionary(), kCapacityOffset));
  __ decl(scratch);
  __ Push(scratch);

  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the hole value).
  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER,
                              kPointerSize);
  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ movp(scratch, args.GetArgumentOperand(1));
    if (i > 0) {
      __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ andp(scratch, Operand(rsp, 0));

    // Scale the index by multiplying by the entry size.
    DCHECK(NameDictionary::kEntrySize == 3);
    __ leap(index(), Operand(scratch, scratch, times_2, 0));  // index *= 3.

    // Having undefined at this place means the name is not contained.
    __ movp(scratch, Operand(dictionary(), index(), times_pointer_size,
                             kElementsStartOffset - kHeapObjectTag));

    __ Cmp(scratch, isolate()->factory()->undefined_value());
    __ j(equal, &not_in_dictionary);

    // Stop if found the property.
    __ cmpp(scratch, args.GetArgumentOperand(0));
    __ j(equal, &in_dictionary);

    if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
      // If we hit a key that is not a unique name during negative
      // lookup we have to bail out as this key might be equal to the
      // key we are looking for.

      // Check if the entry name is not a unique name.
      __ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
      __ JumpIfNotUniqueNameInstanceType(
          FieldOperand(scratch, Map::kInstanceTypeOffset),
          &maybe_in_dictionary);
    }
  }

  __ bind(&maybe_in_dictionary);
  // If we are doing negative lookup then probing failure should be
  // treated as a lookup success. For positive lookup probing failure
  // should be treated as lookup failure.
  if (mode() == POSITIVE_LOOKUP) {
    __ movp(scratch, Immediate(0));
    __ Drop(1);
    __ ret(2 * kPointerSize);
  }

  __ bind(&in_dictionary);
  __ movp(scratch, Immediate(1));
  __ Drop(1);
  __ ret(2 * kPointerSize);

  __ bind(&not_in_dictionary);
  __ movp(scratch, Immediate(0));
  __ Drop(1);
  __ ret(2 * kPointerSize);
}


void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
    Isolate* isolate) {
  StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
  stub1.GetCode();
  StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
  stub2.GetCode();
}

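// Background for the stub below: a generational GC only scans the young
// generation on a minor collection, so every store of a young pointer into
// an old object must be recorded (the "remembered set") or the collector
// could miss a live object. RecordWriteStub is the slow path of that write
// barrier; it is also where incremental marking hooks in, since a store can
// require a black (fully scanned) object to be revisited.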
// Takes the input in 3 registers: address_ value_ and object_. A pointer to
// the value has just been written into the object, now this stub makes sure
// we keep the GC informed. The word in the object where the value has been
// written is in the address register.
void RecordWriteStub::Generate(MacroAssembler* masm) {
  Label skip_to_incremental_noncompacting;
  Label skip_to_incremental_compacting;

  // The first two instructions are generated with labels so as to get the
  // offset fixed up correctly by the bind(Label*) call. We patch it back and
  // forth between a compare instruction (a nop in this position) and the
  // real branch when we start and stop incremental heap marking.
  // See RecordWriteStub::Patch for details.
  __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
  __ jmp(&skip_to_incremental_compacting, Label::kFar);

  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&skip_to_incremental_noncompacting);
  GenerateIncremental(masm, INCREMENTAL);

  __ bind(&skip_to_incremental_compacting);
  GenerateIncremental(masm, INCREMENTAL_COMPACTION);

  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
  // Will be checked in IncrementalMarking::ActivateGeneratedStub.
  masm->set_byte_at(0, kTwoByteNopInstruction);
  masm->set_byte_at(2, kFiveByteNopInstruction);
}

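// The two jumps at the top of Generate() above are assembled as real
// branches so their offsets get fixed up, then overwritten with 2- and
// 5-byte nops (the set_byte_at calls) so a freshly compiled stub starts in
// STORE_BUFFER_ONLY mode. When incremental marking starts,
// RecordWriteStub::Patch rewrites those bytes back into jumps, re-routing
// every recorded store through the incremental path without recompiling any
// callers. An illustrative view of the entry bytes:
//
//   [0..1]  2-byte nop  <->  short jmp to incremental (non-compacting)
//   [2..6]  5-byte nop  <->  near jmp to incremental (compacting)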
void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
  regs_.Save(masm);

  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    Label dont_need_remembered_set;

    __ movp(regs_.scratch0(), Operand(regs_.address(), 0));
    __ JumpIfNotInNewSpace(regs_.scratch0(),
                           regs_.scratch0(),
                           &dont_need_remembered_set);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch0(),
                     1 << MemoryChunk::SCAN_ON_SCAVENGE,
                     not_zero,
                     &dont_need_remembered_set);

    // First notify the incremental marker if necessary, then update the
    // remembered set.
    CheckNeedsToInformIncrementalMarker(
        masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
    InformIncrementalMarker(masm);
    regs_.Restore(masm);
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);

    __ bind(&dont_need_remembered_set);
  }

  CheckNeedsToInformIncrementalMarker(
      masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
  InformIncrementalMarker(masm);
  regs_.Restore(masm);
  __ ret(0);
}


void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
  Register address =
      arg_reg_1.is(regs_.address()) ? kScratchRegister : regs_.address();
  DCHECK(!address.is(regs_.object()));
  DCHECK(!address.is(arg_reg_1));
  __ Move(address, regs_.address());
  __ Move(arg_reg_1, regs_.object());
  // TODO(gc) Can we just set address arg2 in the beginning?
  __ Move(arg_reg_2, address);
  __ LoadAddress(arg_reg_3,
                 ExternalReference::isolate_address(isolate()));
  int argument_count = 3;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count);
  __ CallCFunction(
      ExternalReference::incremental_marking_record_write_function(isolate()),
      argument_count);
  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
}

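// The helper below rate-limits barrier work with a per-page counter: the
// object's page header is found by masking off the low address bits
// (~Page::kPageAlignmentMask), the page's write barrier counter is
// decremented, and only when that budget is exhausted does the stub jump
// straight to informing the marker. Roughly (illustrative only):
//
//   chunk = object & ~kPageAlignmentMask;  // start of the page header
//   if (--chunk->write_barrier_counter < 0) goto need_incremental;
//   if (!IsBlack(object)) return;  // unmarked object: marker not needed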
void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    MacroAssembler* masm,
    OnNoNeedToInformIncrementalMarker on_no_need,
    Mode mode) {
  Label on_black;
  Label need_incremental;
  Label need_incremental_pop_object;

  __ movp(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
  __ andp(regs_.scratch0(), regs_.object());
  __ movp(regs_.scratch1(),
          Operand(regs_.scratch0(),
                  MemoryChunk::kWriteBarrierCounterOffset));
  __ subp(regs_.scratch1(), Immediate(1));
  __ movp(Operand(regs_.scratch0(),
                  MemoryChunk::kWriteBarrierCounterOffset),
          regs_.scratch1());
  __ j(negative, &need_incremental);

  // Let's look at the color of the object: if it is not black we don't have
  // to inform the incremental marker.
  __ JumpIfBlack(regs_.object(),
                 regs_.scratch0(),
                 regs_.scratch1(),
                 &on_black,
                 Label::kNear);

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&on_black);

  // Get the value from the slot.
  __ movp(regs_.scratch0(), Operand(regs_.address(), 0));

  if (mode == INCREMENTAL_COMPACTION) {
    Label ensure_not_white;

    __ CheckPageFlag(regs_.scratch0(),  // Contains value.
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kEvacuationCandidateMask,
                     zero,
                     &ensure_not_white,
                     Label::kNear);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kSkipEvacuationSlotsRecordingMask,
                     zero,
                     &need_incremental);

    __ bind(&ensure_not_white);
  }

  // We need an extra register for this, so we push the object register
  // temporarily.
  __ Push(regs_.object());
  __ EnsureNotWhite(regs_.scratch0(),  // The value.
                    regs_.scratch1(),  // Scratch.
                    regs_.object(),    // Scratch.
                    &need_incremental_pop_object,
                    Label::kNear);
  __ Pop(regs_.object());

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&need_incremental_pop_object);
  __ Pop(regs_.object());

  __ bind(&need_incremental);

  // Fall through when we need to inform the incremental marker.
}


void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : element value to store
  //  -- rcx     : element index as smi
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : array literal index in function
  //  -- rsp[16] : array literal
  //  clobbers rbx, rdx, rdi
  // -----------------------------------

  Label element_done;
  Label double_elements;
  Label smi_element;
  Label slow_elements;
  Label fast_elements;

  // Get array literal index, array literal and its map.
  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rdx, args.GetArgumentOperand(1));
  __ movp(rbx, args.GetArgumentOperand(0));
  __ movp(rdi, FieldOperand(rbx, JSObject::kMapOffset));

  __ CheckFastElements(rdi, &double_elements);

  // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS
  __ JumpIfSmi(rax, &smi_element);
  __ CheckFastSmiElements(rdi, &fast_elements);

  // Storing into the array literal requires an elements transition. Call
  // into the runtime.

  __ bind(&slow_elements);
  __ PopReturnAddressTo(rdi);
  __ Push(rbx);
  __ Push(rcx);
  __ Push(rax);
  __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ Push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
  __ Push(rdx);
  __ PushReturnAddressFrom(rdi);
  __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);

  // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
  __ bind(&fast_elements);
  __ SmiToInteger32(kScratchRegister, rcx);
  __ movp(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
  __ leap(rcx, FieldOperand(rbx, kScratchRegister, times_pointer_size,
                            FixedArrayBase::kHeaderSize));
  __ movp(Operand(rcx, 0), rax);
  // Update the write barrier for the array store.
  __ RecordWrite(rbx, rcx, rax,
                 kDontSaveFPRegs,
                 EMIT_REMEMBERED_SET,
                 OMIT_SMI_CHECK);
  __ ret(0);

  // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or
  // FAST_*_ELEMENTS, and value is Smi.
  __ bind(&smi_element);
  __ SmiToInteger32(kScratchRegister, rcx);
  __ movp(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
  __ movp(FieldOperand(rbx, kScratchRegister, times_pointer_size,
                       FixedArrayBase::kHeaderSize), rax);
  __ ret(0);

  // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
  __ bind(&double_elements);

  __ movp(r9, FieldOperand(rbx, JSObject::kElementsOffset));
  __ SmiToInteger32(r11, rcx);
  __ StoreNumberToDoubleElements(rax,
                                 r9,
                                 r11,
                                 xmm0,
                                 &slow_elements);
  __ ret(0);
}


void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(isolate(), 1, kSaveFPRegs);
  __ Call(ces.GetCode(), RelocInfo::CODE_TARGET);
  int parameter_count_offset =
      StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
  __ movp(rbx, MemOperand(rbp, parameter_count_offset));
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  __ PopReturnAddressTo(rcx);
  int additional_offset =
      function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0;
  __ leap(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset));
  __ jmp(rcx);  // Return to IC Miss stub, continuation still on stack.
}


void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
  EmitLoadTypeFeedbackVector(masm, VectorLoadICDescriptor::VectorRegister());
  VectorLoadStub stub(isolate(), state());
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
}


void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
  EmitLoadTypeFeedbackVector(masm, VectorLoadICDescriptor::VectorRegister());
  VectorKeyedLoadStub stub(isolate());
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
}


void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
  if (masm->isolate()->function_entry_hook() != NULL) {
    ProfileEntryHookStub stub(masm->isolate());
    masm->CallStub(&stub);
  }
}


void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  // This stub can be called from essentially anywhere, so it needs to save
  // all volatile and callee-save registers.
  const size_t kNumSavedRegisters = 2;
  __ pushq(arg_reg_1);
  __ pushq(arg_reg_2);

  // Calculate the original stack pointer and store it in the second arg.
  __ leap(arg_reg_2,
          Operand(rsp, kNumSavedRegisters * kRegisterSize + kPCOnStackSize));

  // Calculate the function address to the first arg.
  __ movp(arg_reg_1, Operand(rsp, kNumSavedRegisters * kRegisterSize));
  __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));

  // Save the remainder of the volatile registers.
  masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);

  // Call the entry hook function.
  __ Move(rax, FUNCTION_ADDR(isolate()->function_entry_hook()),
          Assembler::RelocInfoNone());

  AllowExternalCallThatCantCauseGC scope(masm);

  const int kArgumentCount = 2;
  __ PrepareCallCFunction(kArgumentCount);
  __ CallCFunction(rax, kArgumentCount);

  // Restore volatile regs.
  masm->PopCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);
  __ popq(arg_reg_2);
  __ popq(arg_reg_1);

  __ Ret();
}

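// The helpers below specialize the Array constructor per elements kind.
// CreateArrayDispatch compares the runtime kind in rdx against every fast
// kind in sequence order and tail-calls a stub precompiled for that kind
// (see the *AheadOfTime helpers further down), so the dispatch is a short
// cmp/j chain rather than a table load. Conceptually (illustrative only):
//
//   switch (kind) {
//     case FAST_SMI_ELEMENTS:       return Stub<FAST_SMI_ELEMENTS>(...);
//     case FAST_HOLEY_SMI_ELEMENTS: ...  // one case per fast kind
//     default: Abort(kUnexpectedElementsKindInArrayConstructor);
//   }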
template<class T>
static void CreateArrayDispatch(MacroAssembler* masm,
                                AllocationSiteOverrideMode mode) {
  if (mode == DISABLE_ALLOCATION_SITES) {
    T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmpl(rdx, Immediate(kind));
      __ j(not_equal, &next);
      T stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}

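// CreateArrayDispatchOneArgument leans on the ElementsKind numbering that
// its DCHECKs spell out: each packed kind is even and its holey variant is
// the next (odd) value, so "packed -> holey" is just adding one (the
// incl(rdx) below) and "is this kind holey?" is a single testb of bit 0.
// That is also why transitioning the recorded AllocationSite kind can be
// done with one smi-add of kFastElementsKindPackedToHoley.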
4338static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
4339 AllocationSiteOverrideMode mode) {
4340 // rbx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
4341 // rdx - kind (if mode != DISABLE_ALLOCATION_SITES)
4342 // rax - number of arguments
4343 // rdi - constructor?
4344 // rsp[0] - return address
4345 // rsp[8] - last argument
4346 Handle<Object> undefined_sentinel(
4347 masm->isolate()->heap()->undefined_value(),
4348 masm->isolate());
4349
4350 Label normal_sequence;
4351 if (mode == DONT_OVERRIDE) {
4352 DCHECK(FAST_SMI_ELEMENTS == 0);
4353 DCHECK(FAST_HOLEY_SMI_ELEMENTS == 1);
4354 DCHECK(FAST_ELEMENTS == 2);
4355 DCHECK(FAST_HOLEY_ELEMENTS == 3);
4356 DCHECK(FAST_DOUBLE_ELEMENTS == 4);
4357 DCHECK(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
4358
4359 // is the low bit set? If so, we are holey and that is good.
4360 __ testb(rdx, Immediate(1));
4361 __ j(not_zero, &normal_sequence);
4362 }
4363
4364 // look at the first argument
4365 StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
4366 __ movp(rcx, args.GetArgumentOperand(0));
4367 __ testp(rcx, rcx);
4368 __ j(zero, &normal_sequence);
4369
4370 if (mode == DISABLE_ALLOCATION_SITES) {
4371 ElementsKind initial = GetInitialFastElementsKind();
4372 ElementsKind holey_initial = GetHoleyElementsKind(initial);
4373
4374 ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
4375 holey_initial,
4376 DISABLE_ALLOCATION_SITES);
4377 __ TailCallStub(&stub_holey);
4378
4379 __ bind(&normal_sequence);
4380 ArraySingleArgumentConstructorStub stub(masm->isolate(),
4381 initial,
4382 DISABLE_ALLOCATION_SITES);
4383 __ TailCallStub(&stub);
4384 } else if (mode == DONT_OVERRIDE) {
4385 // We are going to create a holey array, but our kind is non-holey.
4386 // Fix kind and retry (only if we have an allocation site in the slot).
4387 __ incl(rdx);
4388
4389 if (FLAG_debug_code) {
4390 Handle<Map> allocation_site_map =
4391 masm->isolate()->factory()->allocation_site_map();
4392 __ Cmp(FieldOperand(rbx, 0), allocation_site_map);
4393 __ Assert(equal, kExpectedAllocationSite);
4394 }
4395
4396 // Save the resulting elements kind in type info. We can't just store r3
4397 // in the AllocationSite::transition_info field because elements kind is
4398 // restricted to a portion of the field...upper bits need to be left alone.
4399 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
4400 __ SmiAddConstant(FieldOperand(rbx, AllocationSite::kTransitionInfoOffset),
4401 Smi::FromInt(kFastElementsKindPackedToHoley));
4402
4403 __ bind(&normal_sequence);
4404 int last_index = GetSequenceIndexFromFastElementsKind(
4405 TERMINAL_FAST_ELEMENTS_KIND);
4406 for (int i = 0; i <= last_index; ++i) {
4407 Label next;
4408 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4409 __ cmpl(rdx, Immediate(kind));
4410 __ j(not_equal, &next);
4411 ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
4412 __ TailCallStub(&stub);
4413 __ bind(&next);
4414 }
4415
4416 // If we reached this point there is a problem.
4417 __ Abort(kUnexpectedElementsKindInArrayConstructor);
4418 } else {
4419 UNREACHABLE();
4420 }
4421}
4422
4423
4424template<class T>
4425static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
4426 int to_index = GetSequenceIndexFromFastElementsKind(
4427 TERMINAL_FAST_ELEMENTS_KIND);
4428 for (int i = 0; i <= to_index; ++i) {
4429 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4430 T stub(isolate, kind);
4431 stub.GetCode();
4432 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
4433 T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
4434 stub1.GetCode();
4435 }
4436 }
4437}
4438
4439
4440void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
4441 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
4442 isolate);
4443 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
4444 isolate);
4445 ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
4446 isolate);
4447}
4448
4449
4450void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
4451 Isolate* isolate) {
4452 ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
4453 for (int i = 0; i < 2; i++) {
4454 // For internal arrays we only need a few things
4455 InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
4456 stubh1.GetCode();
4457 InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
4458 stubh2.GetCode();
4459 InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
4460 stubh3.GetCode();
4461 }
4462}
4463
4464
4465void ArrayConstructorStub::GenerateDispatchToArrayStub(
4466 MacroAssembler* masm,
4467 AllocationSiteOverrideMode mode) {
4468 if (argument_count() == ANY) {
4469 Label not_zero_case, not_one_case;
4470 __ testp(rax, rax);
4471 __ j(not_zero, &not_zero_case);
4472 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
4473
4474 __ bind(&not_zero_case);
4475 __ cmpl(rax, Immediate(1));
4476 __ j(greater, &not_one_case);
4477 CreateArrayDispatchOneArgument(masm, mode);
4478
4479 __ bind(&not_one_case);
4480 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
4481 } else if (argument_count() == NONE) {
4482 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
4483 } else if (argument_count() == ONE) {
4484 CreateArrayDispatchOneArgument(masm, mode);
4485 } else if (argument_count() == MORE_THAN_ONE) {
4486 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
4487 } else {
4488 UNREACHABLE();
4489 }
4490}
4491
4492
void ArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rbx    : AllocationSite or undefined
  //  -- rdi    : constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions, which always have maps.

    // The initial map for the builtin Array function should be a map.
    __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Because kSmiTag == 0, the Smi check below also catches a NULL pointer.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rcx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);

    // We should have either undefined or a valid AllocationSite in rbx.
    __ AssertUndefinedOrAllocationSite(rbx);
  }

  Label no_info;
  // If the feedback vector is the undefined value, call an array constructor
  // that doesn't use AllocationSites.
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(equal, &no_info);

  // Only look at the lower 16 bits of the transition info.
  __ movp(rdx, FieldOperand(rbx, AllocationSite::kTransitionInfoOffset));
  __ SmiToInteger32(rdx, rdx);
  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
  __ andp(rdx, Immediate(AllocationSite::ElementsKindBits::kMask));
  GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);

  __ bind(&no_info);
  GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
}


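// Generates the argument-count dispatch for the internal array constructor
// with a fixed elements kind.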
void InternalArrayConstructorStub::GenerateCase(
    MacroAssembler* masm, ElementsKind kind) {
  Label not_zero_case, not_one_case;
  Label normal_sequence;

  __ testp(rax, rax);
  __ j(not_zero, &not_zero_case);
  InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
  __ TailCallStub(&stub0);

  __ bind(&not_zero_case);
  __ cmpl(rax, Immediate(1));
  __ j(greater, &not_one_case);

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array; look at the first argument to
    // see whether a nonzero length was requested.
    StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movp(rcx, args.GetArgumentOperand(0));
    __ testp(rcx, rcx);
    __ j(zero, &normal_sequence);

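    // A nonzero length means the array starts out with holes, so the packed
    // elements kind must be promoted to its holey variant.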
    InternalArraySingleArgumentConstructorStub
        stub1_holey(isolate(), GetHoleyElementsKind(kind));
    __ TailCallStub(&stub1_holey);
  }

  __ bind(&normal_sequence);
  InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
  __ TailCallStub(&stub1);

  __ bind(&not_one_case);
  InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
  __ TailCallStub(&stubN);
}


void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rdi    : constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions, which always have maps.

    // The initial map for the builtin Array function should be a map.
    __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Because kSmiTag == 0, the Smi check below also catches a NULL pointer.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rcx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));

  // Load the map's "bit field 2" into rcx. We only need the first byte,
  // but the following masking takes care of that anyway.
  __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ DecodeField<Map::ElementsKindBits>(rcx);

  if (FLAG_debug_code) {
    Label done;
    __ cmpl(rcx, Immediate(FAST_ELEMENTS));
    __ j(equal, &done);
    __ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS));
    __ Assert(equal,
              kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  Label fast_elements_case;
  __ cmpl(rcx, Immediate(FAST_ELEMENTS));
  __ j(equal, &fast_elements_case);
  GenerateCase(masm, FAST_HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateCase(masm, FAST_ELEMENTS);
}


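// Builds a FunctionCallbackInfo on the stack and invokes the C++ API callback
// through the invocation thunk.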
void CallApiFunctionStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax                 : callee
  //  -- rbx                 : call_data
  //  -- rcx                 : holder
  //  -- rdx                 : api_function_address
  //  -- rsi                 : context
  //  --
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[argc * 8]       : first argument
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  Register callee = rax;
  Register call_data = rbx;
  Register holder = rcx;
  Register api_function_address = rdx;
  Register return_address = rdi;
  Register context = rsi;

  int argc = this->argc();
  bool is_store = this->is_store();
  bool call_data_undefined = this->call_data_undefined();

  typedef FunctionCallbackArguments FCA;

  STATIC_ASSERT(FCA::kContextSaveIndex == 6);
  STATIC_ASSERT(FCA::kCalleeIndex == 5);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kHolderIndex == 0);
  STATIC_ASSERT(FCA::kArgsLength == 7);

  __ PopReturnAddressTo(return_address);

  // context save
  __ Push(context);
  // load context from callee
  __ movp(context, FieldOperand(callee, JSFunction::kContextOffset));

  // callee
  __ Push(callee);

  // call data
  __ Push(call_data);
  Register scratch = call_data;
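  // call_data has already been pushed, so its register can be reused to hold
  // the undefined value that seeds the two return value slots below.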
  if (!call_data_undefined) {
    __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  }
  // return value
  __ Push(scratch);
  // return value default
  __ Push(scratch);
  // isolate
  __ Move(scratch,
          ExternalReference::isolate_address(isolate()));
  __ Push(scratch);
  // holder
  __ Push(holder);

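  // rsp now points at the holder slot, the start of the implicit
  // FunctionCallbackArguments; capture it before the return address is
  // pushed back.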
  __ movp(scratch, rsp);
  // Push the return address back on the stack.
  __ PushReturnAddressFrom(return_address);

  // Allocate the v8::FunctionCallbackInfo structure in the arguments' space,
  // since it's not controlled by the GC.
  const int kApiStackSpace = 4;

  __ PrepareCallApiFunction(kApiStackSpace);

  // FunctionCallbackInfo::implicit_args_.
  __ movp(StackSpaceOperand(0), scratch);
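  // values_ must point at the first JS argument, which sits
  // argc + FCA::kArgsLength - 1 slots above implicit_args_; the arguments
  // themselves are accessed at decreasing addresses from there.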
  __ addp(scratch, Immediate((argc + FCA::kArgsLength - 1) * kPointerSize));
  __ movp(StackSpaceOperand(1), scratch);  // FunctionCallbackInfo::values_.
  __ Set(StackSpaceOperand(2), argc);  // FunctionCallbackInfo::length_.
  // FunctionCallbackInfo::is_construct_call_.
  __ Set(StackSpaceOperand(3), 0);

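  // The first two C calling-convention argument registers differ between
  // Win64 and the System V AMD64 ABI.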
#if defined(__MINGW64__) || defined(_WIN64)
  Register arguments_arg = rcx;
  Register callback_arg = rdx;
#else
  Register arguments_arg = rdi;
  Register callback_arg = rsi;
#endif

  // It's okay if api_function_address == callback_arg, but not if it equals
  // arguments_arg.
  DCHECK(!api_function_address.is(arguments_arg));

  // v8::FunctionCallback's argument.
  __ leap(arguments_arg, StackSpaceOperand(0));

  ExternalReference thunk_ref =
      ExternalReference::invoke_function_callback(isolate());

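  // Calling through the thunk lets the runtime intercept the invocation,
  // e.g. while the profiler is active.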
  // Accessor for the FunctionCallbackInfo and the first JS argument.
  StackArgumentsAccessor args_from_rbp(rbp, FCA::kArgsLength + 1,
                                       ARGUMENTS_DONT_CONTAIN_RECEIVER);
  Operand context_restore_operand = args_from_rbp.GetArgumentOperand(
      FCA::kArgsLength - FCA::kContextSaveIndex);
  // Store callbacks return the first JS argument, i.e. the value being stored.
  Operand return_value_operand = args_from_rbp.GetArgumentOperand(
      is_store ? 0 : FCA::kArgsLength - FCA::kReturnValueOffset);
  __ CallApiFunctionAndReturn(api_function_address,
                              thunk_ref,
                              callback_arg,
                              argc + FCA::kArgsLength + 1,
                              return_value_operand,
                              &context_restore_operand);
}


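// Builds the AccessorInfo arguments on the stack and invokes the C++ accessor
// getter through its thunk.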
void CallApiGetterStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rsp[0]                  : return address
  //  -- rsp[8]                  : name
  //  -- rsp[16 - kArgsLength*8] : PropertyCallbackArguments object
  //  -- ...
  //  -- r8                      : api_function_address
  // -----------------------------------

#if defined(__MINGW64__) || defined(_WIN64)
  Register getter_arg = r8;
  Register accessor_info_arg = rdx;
  Register name_arg = rcx;
#else
  Register getter_arg = rdx;
  Register accessor_info_arg = rsi;
  Register name_arg = rdi;
#endif
  Register api_function_address = ApiGetterDescriptor::function_address();
  DCHECK(api_function_address.is(r8));
  Register scratch = rax;

  // v8::Arguments::values_ and the handle for the name.
  const int kStackSpace = PropertyCallbackArguments::kArgsLength + 1;

  // Allocate v8::AccessorInfo in non-GCed stack space.
  const int kArgStackSpace = 1;

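  // The property name handle sits just above the return address on the stack.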
  __ leap(name_arg, Operand(rsp, kPCOnStackSize));

  __ PrepareCallApiFunction(kArgStackSpace);
  __ leap(scratch, Operand(name_arg, 1 * kPointerSize));
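  // scratch now points one slot above the name, where the
  // PropertyCallbackArguments begin.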

  // v8::PropertyAccessorInfo::args_.
  __ movp(StackSpaceOperand(0), scratch);

  // The context register (rsi) has been saved in PrepareCallApiFunction and
  // could be used to pass arguments.
  __ leap(accessor_info_arg, StackSpaceOperand(0));

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback(isolate());

  // It's okay if api_function_address == getter_arg, but not if it equals
  // accessor_info_arg or name_arg.
  DCHECK(!api_function_address.is(accessor_info_arg) &&
         !api_function_address.is(name_arg));

  // The name handle is counted as an argument.
  StackArgumentsAccessor args(rbp, PropertyCallbackArguments::kArgsLength);
  Operand return_value_operand = args.GetArgumentOperand(
      PropertyCallbackArguments::kArgsLength - 1 -
      PropertyCallbackArguments::kReturnValueOffset);
  __ CallApiFunctionAndReturn(api_function_address,
                              thunk_ref,
                              getter_arg,
                              kStackSpace,
                              return_value_operand,
                              NULL);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64