// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X64

#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"
#include "src/isolate.h"
#include "src/regexp/jsregexp.h"
#include "src/regexp/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"
#include "src/x64/code-stubs-x64.h"

namespace v8 {
namespace internal {


static void InitializeArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(rax, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  }
}


static void InitializeInternalArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kInternalArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(rax, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  }
}


void ArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 0);
}


void ArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void ArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, -1);
}


void InternalArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 0);
}


void InternalArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void InternalArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, -1);
}


#define __ ACCESS_MASM(masm)


void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
                                               ExternalReference miss) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
  int param_count = descriptor.GetRegisterParameterCount();
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    DCHECK(param_count == 0 ||
           rax.is(descriptor.GetRegisterParameter(param_count - 1)));
    // Push arguments
    for (int i = 0; i < param_count; ++i) {
      __ Push(descriptor.GetRegisterParameter(i));
    }
    __ CallExternalReference(miss, param_count);
  }

  __ Ret();
}
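
// Illustration (not part of the stub): for a stub whose call descriptor
// lists the register parameters {rdx, rax}, the loop above emits the
// equivalent of
//
//   __ Push(rdx);
//   __ Push(rax);
//   __ CallExternalReference(miss, 2);
//
// i.e. the register parameters are spilled in declaration order and the miss
// handler receives them as stack arguments inside the internal frame, with
// the last descriptor register required to be rax (the DCHECK above).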


void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  __ PushCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs);
  const int argument_count = 1;
  __ PrepareCallCFunction(argument_count);
  __ LoadAddress(arg_reg_1,
                 ExternalReference::isolate_address(isolate()));

  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  __ PopCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs);
  __ ret(0);
}
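
// The sequence above is the standard stub-to-C++ call pattern: save all
// caller-saved registers (optionally including the XMM registers), set up
// the outgoing arguments with PrepareCallCFunction, make the call, then
// restore. AllowExternalCallThatCantCauseGC documents that the callee cannot
// trigger a GC, so no handle scopes are needed. The callee here receives a
// single Isolate* argument in arg_reg_1.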


class FloatingPointHelper : public AllStatic {
 public:
  enum ConvertUndefined {
    CONVERT_UNDEFINED_TO_ZERO,
    BAILOUT_ON_UNDEFINED
  };
  // Load the operands from rdx and rax into xmm0 and xmm1, as doubles.
  // If the operands are not both numbers, jump to not_numbers.
  // Leaves rdx and rax unchanged. SmiOperands assumes both are smis.
  // NumberOperands assumes both are smis or heap numbers.
  static void LoadSSE2UnknownOperands(MacroAssembler* masm,
                                      Label* not_numbers);
};


void DoubleToIStub::Generate(MacroAssembler* masm) {
  Register input_reg = this->source();
  Register final_result_reg = this->destination();
  DCHECK(is_truncating());

  Label check_negative, process_64_bits, done;

  int double_offset = offset();

  // Account for return address and saved regs if input is rsp.
  if (input_reg.is(rsp)) double_offset += 3 * kRegisterSize;

  MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
  MemOperand exponent_operand(MemOperand(input_reg,
                                         double_offset + kDoubleSize / 2));

  Register scratch1;
  Register scratch_candidates[3] = { rbx, rdx, rdi };
  for (int i = 0; i < 3; i++) {
    scratch1 = scratch_candidates[i];
    if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
  }

  // Since we must use rcx for shifts below, use some other register (rax)
  // to calculate the result if rcx is the requested return register.
  Register result_reg = final_result_reg.is(rcx) ? rax : final_result_reg;
  // Save rcx if it isn't the return register and therefore volatile, or if it
  // is the return register, then save the temp register we use in its stead
  // for the result.
  Register save_reg = final_result_reg.is(rcx) ? rax : rcx;
  __ pushq(scratch1);
  __ pushq(save_reg);

  bool stash_exponent_copy = !input_reg.is(rsp);
  __ movl(scratch1, mantissa_operand);
  __ Movsd(xmm0, mantissa_operand);
  __ movl(rcx, exponent_operand);
  if (stash_exponent_copy) __ pushq(rcx);

  __ andl(rcx, Immediate(HeapNumber::kExponentMask));
  __ shrl(rcx, Immediate(HeapNumber::kExponentShift));
  __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias));
  __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits);

  // Result is entirely in lower 32-bits of mantissa
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  __ subl(rcx, Immediate(delta));
  __ xorl(result_reg, result_reg);
  __ cmpl(rcx, Immediate(31));
  __ j(above, &done);
  __ shll_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  __ Cvttsd2siq(result_reg, xmm0);
  __ jmp(&done, Label::kNear);

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ movl(result_reg, scratch1);
  __ negl(result_reg);
  if (stash_exponent_copy) {
    __ cmpl(MemOperand(rsp, 0), Immediate(0));
  } else {
    __ cmpl(exponent_operand, Immediate(0));
  }
  __ cmovl(greater, result_reg, scratch1);

  // Restore registers
  __ bind(&done);
  if (stash_exponent_copy) {
    __ addp(rsp, Immediate(kDoubleSize));
  }
  if (!final_result_reg.is(result_reg)) {
    DCHECK(final_result_reg.is(rcx));
    __ movl(final_result_reg, result_reg);
  }
  __ popq(save_reg);
  __ popq(scratch1);
  __ ret(0);
}
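
// A plain C++ sketch of the truncation above (illustrative only; not a V8
// API). An IEEE-754 double is sign(1) | biased exponent(11) | mantissa(52).
// For small exponents the hardware truncating conversion suffices; once the
// unbiased exponent reaches 52, the low 32 result bits come entirely from
// mantissa bits shifted left:
//
//   int32_t TruncateDoubleToI(double d) {
//     uint64_t bits;
//     memcpy(&bits, &d, sizeof(bits));
//     int unbiased = static_cast<int>((bits >> 52) & 0x7FF) - 1023;
//     if (unbiased < 52) {
//       return static_cast<int32_t>(static_cast<int64_t>(d));  // Cvttsd2siq
//     }
//     int shift = unbiased - 52;   // exponent - (bias + significand size)
//     if (shift > 31) return 0;    // every mantissa bit lands above bit 31
//     uint32_t low = static_cast<uint32_t>(bits) << shift;
//     if (bits >> 63) low = 0u - low;  // negate modulo 2^32
//     return static_cast<int32_t>(low);
//   }
//
// NaN and infinity take the shift > 31 path and produce 0, matching ToInt32.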


void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm,
                                                  Label* not_numbers) {
  Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done;
  // Load operand in rdx into xmm0, or branch to not_numbers.
  __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex);
  __ JumpIfSmi(rdx, &load_smi_rdx);
  __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);  // Argument in rdx is not a number.
  __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  // Load operand in rax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(rax, &load_smi_rax);

  __ bind(&load_nonsmi_rax);
  __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);
  __ Movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi_rdx);
  __ SmiToInteger32(kScratchRegister, rdx);
  __ Cvtlsi2sd(xmm0, kScratchRegister);
  __ JumpIfNotSmi(rax, &load_nonsmi_rax);

  __ bind(&load_smi_rax);
  __ SmiToInteger32(kScratchRegister, rax);
  __ Cvtlsi2sd(xmm1, kScratchRegister);
  __ bind(&done);
}


void MathPowStub::Generate(MacroAssembler* masm) {
  const Register exponent = MathPowTaggedDescriptor::exponent();
  DCHECK(exponent.is(rdx));
  const Register base = rax;
  const Register scratch = rcx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ movp(scratch, Immediate(1));
  __ Cvtlsi2sd(double_result, scratch);

  if (exponent_type() == ON_STACK) {
    Label base_is_smi, unpack_exponent;
    // The exponent and base are supplied as arguments on the stack.
    // This can only happen if the stub is called from non-optimized code.
    // Load input parameters from stack.
    StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movp(base, args.GetArgumentOperand(0));
    __ movp(exponent, args.GetArgumentOperand(1));
    __ JumpIfSmi(base, &base_is_smi, Label::kNear);
    __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &call_runtime);

    __ Movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset));
    __ jmp(&unpack_exponent, Label::kNear);

    __ bind(&base_is_smi);
    __ SmiToInteger32(base, base);
    __ Cvtlsi2sd(double_base, base);
    __ bind(&unpack_exponent);

    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiToInteger32(exponent, exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ CompareRoot(FieldOperand(exponent, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &call_runtime);
    __ Movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
  } else if (exponent_type() == TAGGED) {
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiToInteger32(exponent, exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ Movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type() != INTEGER) {
    Label fast_power, try_arithmetic_simplification;
    // Detect integer exponents stored as double.
    __ DoubleToI(exponent, double_exponent, double_scratch,
                 TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification,
                 &try_arithmetic_simplification,
                 &try_arithmetic_simplification);
    __ jmp(&int_exponent);

    __ bind(&try_arithmetic_simplification);
    __ Cvttsd2si(exponent, double_exponent);
    // Skip to runtime if possibly NaN (indicated by the indefinite integer).
    __ cmpl(exponent, Immediate(0x1));
    __ j(overflow, &call_runtime);

    if (exponent_type() == ON_STACK) {
      // Detect square root case. Crankshaft detects constant +/-0.5 at
      // compile time and uses DoMathPowHalf instead. We then skip this check
      // for non-constant cases of +/-0.5 as these hardly occur.
      Label continue_sqrt, continue_rsqrt, not_plus_half;
      // Test for 0.5.
      // Load double_scratch with 0.5.
      __ movq(scratch, V8_UINT64_C(0x3FE0000000000000));
      __ Movq(double_scratch, scratch);
      // Already ruled out NaNs for exponent.
      __ Ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &not_plus_half, Label::kNear);

      // Calculates square root of base. Check for the special case of
      // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
      // According to IEEE-754, double-precision -Infinity has the highest
      // 12 bits set and the lowest 52 bits cleared.
      __ movq(scratch, V8_UINT64_C(0xFFF0000000000000));
      __ Movq(double_scratch, scratch);
      __ Ucomisd(double_scratch, double_base);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal. However, it also sets the carry flag.
      __ j(not_equal, &continue_sqrt, Label::kNear);
      __ j(carry, &continue_sqrt, Label::kNear);

      // Set result to Infinity in the special case.
      __ Xorpd(double_result, double_result);
      __ Subsd(double_result, double_scratch);
      __ jmp(&done);

      __ bind(&continue_sqrt);
      // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
      __ Xorpd(double_scratch, double_scratch);
      __ Addsd(double_scratch, double_base);  // Convert -0 to 0.
      __ Sqrtsd(double_result, double_scratch);
      __ jmp(&done);

      // Test for -0.5.
      __ bind(&not_plus_half);
      // Load double_scratch with -0.5 by subtracting 1.
      __ Subsd(double_scratch, double_result);
      // Already ruled out NaNs for exponent.
      __ Ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &fast_power, Label::kNear);

      // Calculates reciprocal of square root of base. Check for the special
      // case of Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
      // According to IEEE-754, double-precision -Infinity has the highest
      // 12 bits set and the lowest 52 bits cleared.
      __ movq(scratch, V8_UINT64_C(0xFFF0000000000000));
      __ Movq(double_scratch, scratch);
      __ Ucomisd(double_scratch, double_base);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal. However, it also sets the carry flag.
      __ j(not_equal, &continue_rsqrt, Label::kNear);
      __ j(carry, &continue_rsqrt, Label::kNear);

      // Set result to 0 in the special case.
      __ Xorpd(double_result, double_result);
      __ jmp(&done);

      __ bind(&continue_rsqrt);
      // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
      __ Xorpd(double_exponent, double_exponent);
      __ Addsd(double_exponent, double_base);  // Convert -0 to +0.
      __ Sqrtsd(double_exponent, double_exponent);
      __ Divsd(double_result, double_exponent);
      __ jmp(&done);
    }

    // Using FPU instructions to calculate power.
    Label fast_power_failed;
    __ bind(&fast_power);
    __ fnclex();  // Clear flags to catch exceptions later.
    // Transfer (B)ase and (E)xponent onto the FPU register stack.
    __ subp(rsp, Immediate(kDoubleSize));
    __ Movsd(Operand(rsp, 0), double_exponent);
    __ fld_d(Operand(rsp, 0));  // E
    __ Movsd(Operand(rsp, 0), double_base);
    __ fld_d(Operand(rsp, 0));  // B, E

    // Exponent is in st(1) and base is in st(0)
    // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
    // FYL2X calculates st(1) * log2(st(0))
    __ fyl2x();    // X
    __ fld(0);     // X, X
    __ frndint();  // rnd(X), X
    __ fsub(1);    // rnd(X), X-rnd(X)
    __ fxch(1);    // X - rnd(X), rnd(X)
    // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
    __ f2xm1();    // 2^(X-rnd(X)) - 1, rnd(X)
    __ fld1();     // 1, 2^(X-rnd(X)) - 1, rnd(X)
    __ faddp(1);   // 2^(X-rnd(X)), rnd(X)
    // FSCALE calculates st(0) * 2^st(1)
    __ fscale();   // 2^X, rnd(X)
    __ fstp(1);
    // Bail out to runtime in case of exceptions in the status word.
    __ fnstsw_ax();
    __ testb(rax, Immediate(0x5F));  // Check for all but precision exception.
    __ j(not_zero, &fast_power_failed, Label::kNear);
    __ fstp_d(Operand(rsp, 0));
    __ Movsd(double_result, Operand(rsp, 0));
    __ addp(rsp, Immediate(kDoubleSize));
    __ jmp(&done);

    __ bind(&fast_power_failed);
    __ fninit();
    __ addp(rsp, Immediate(kDoubleSize));
    __ jmp(&call_runtime);
  }
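
  // Note on the FPU sequence above: F2XM1 only accepts arguments in (-1, 1),
  // so 2^X for X = E * log2(B) is computed as
  //     2^X = 2^(X - rnd(X)) * 2^rnd(X)
  // where |X - rnd(X)| <= 0.5 lies in F2XM1's domain and the 2^rnd(X) factor
  // is applied exactly by FSCALE. Worked example (illustrative): B = 3, E = 2
  // gives X = 2 * log2(3) ~= 3.1699 and rnd(X) = 3; F2XM1 yields
  // 2^0.1699 - 1 = 0.125, and adding 1 and scaling by 2^3 gives 9 = 3^2.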

  // Calculate power with integer exponent.
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  // Back up exponent as we need to check if exponent is negative later.
  __ movp(scratch, exponent);  // Back up exponent.
  __ Movsd(double_scratch, double_base);     // Back up base.
  __ Movsd(double_scratch2, double_result);  // Load double_exponent with 1.

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ testl(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ negl(scratch);
  __ bind(&no_neg);

  __ j(zero, &while_false, Label::kNear);
  __ shrl(scratch, Immediate(1));
  // Above condition means CF==0 && ZF==0. This means that the
  // bit that has been shifted out is 0 and the result is not 0.
  __ j(above, &while_true, Label::kNear);
  __ Movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ shrl(scratch, Immediate(1));
  __ Mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ Mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);

  __ bind(&while_false);
  // If the exponent is negative, return 1/result.
  __ testl(exponent, exponent);
  __ j(greater, &done);
  __ Divsd(double_scratch2, double_result);
  __ Movsd(double_result, double_scratch2);
  // Test whether result is zero. Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ Xorpd(double_scratch2, double_scratch2);
  __ Ucomisd(double_scratch2, double_result);
  // double_exponent aliased as double_scratch2 has already been overwritten
  // and may not have contained the exponent value in the first place when the
  // input was a smi. We reset it with exponent value before bailing out.
  __ j(not_equal, &done);
  __ Cvtlsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  if (exponent_type() == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMathPowRT);

    // The stub is called from non-optimized code, which expects the result
    // as heap number in rax.
    __ bind(&done);
    __ AllocateHeapNumber(rax, rcx, &call_runtime);
    __ Movsd(FieldOperand(rax, HeapNumber::kValueOffset), double_result);
    __ ret(2 * kPointerSize);
  } else {
    __ bind(&call_runtime);
    // Move base to the correct argument register. Exponent is already in xmm1.
    __ Movsd(xmm0, double_base);
    DCHECK(double_exponent.is(xmm1));
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(2);
      __ CallCFunction(
          ExternalReference::power_double_double_function(isolate()), 2);
    }
    // Return value is in xmm0.
    __ Movsd(double_result, xmm0);

    __ bind(&done);
    __ ret(0);
  }
}
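
// The int_exponent loop in MathPowStub above is binary exponentiation
// (square-and-multiply): "above" after shrl means CF==0 && ZF==0, i.e. the
// shifted-out exponent bit was 0 and bits remain. A plain C++ sketch of the
// same algorithm (illustrative only, not a V8 API):
//
//   double PowIntExponent(double base, int exponent) {
//     unsigned bits = exponent < 0 ? 0u - static_cast<unsigned>(exponent)
//                                  : static_cast<unsigned>(exponent);
//     double result = 1.0;
//     double scratch = base;  // holds base^(2^i) on iteration i
//     while (bits != 0) {
//       if (bits & 1) result *= scratch;
//       scratch *= scratch;
//       bits >>= 1;
//     }
//     // Negative exponents invert at the end; the stub falls back to the
//     // runtime if this underflows to zero (subnormal results).
//     return exponent < 0 ? 1.0 / result : result;
//   }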


void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  Label miss;
  Register receiver = LoadDescriptor::ReceiverRegister();
  // Ensure that the vector and slot registers won't be clobbered before
  // calling the miss handler.
  DCHECK(!AreAliased(r8, r9, LoadWithVectorDescriptor::VectorRegister(),
                     LoadDescriptor::SlotRegister()));

  NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, r8,
                                                          r9, &miss);
  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
}


void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
  // Return address is on the stack.
  Label slow;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register key = LoadDescriptor::NameRegister();
  Register scratch = rax;
  DCHECK(!scratch.is(receiver) && !scratch.is(key));

  // Check that the key is an array index, that is Uint32.
  STATIC_ASSERT(kSmiValueSize <= 32);
  __ JumpUnlessNonNegativeSmi(key, &slow);

  // Everything is fine, call runtime.
  __ PopReturnAddressTo(scratch);
  __ Push(receiver);  // receiver
  __ Push(key);       // key
  __ PushReturnAddressFrom(scratch);

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kLoadElementWithInterceptor);

  __ bind(&slow);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}


void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
  // Return address is on the stack.
  Label miss;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register index = LoadDescriptor::NameRegister();
  Register scratch = rdi;
  Register result = rax;
  DCHECK(!scratch.is(receiver) && !scratch.is(index));
  DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()) &&
         result.is(LoadDescriptor::SlotRegister()));

  // StringCharAtGenerator doesn't use the result register until it's passed
  // the different miss possibilities. If it did, we would have a conflict
  // when FLAG_vector_ics is true.
  StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX,
                                          RECEIVER_IS_STRING);
  char_at_generator.GenerateFast(masm);
  __ ret(0);

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper);

  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}


void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Jump straight to the runtime if native RegExp is not selected at compile
  // time, or if the regexp entry in generated code has been turned off
  // (by a runtime switch or at compilation).
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec);
#else  // V8_INTERPRETED_REGEXP

  // Stack frame on entry.
  //  rsp[0]  : return address
  //  rsp[8]  : last_match_info (expected JSArray)
  //  rsp[16] : previous index
  //  rsp[24] : subject string
  //  rsp[32] : JSRegExp object

  enum RegExpExecStubArgumentIndices {
    JS_REG_EXP_OBJECT_ARGUMENT_INDEX,
    SUBJECT_STRING_ARGUMENT_INDEX,
    PREVIOUS_INDEX_ARGUMENT_INDEX,
    LAST_MATCH_INFO_ARGUMENT_INDEX,
    REG_EXP_EXEC_ARGUMENT_COUNT
  };

  StackArgumentsAccessor args(rsp, REG_EXP_EXEC_ARGUMENT_COUNT,
                              ARGUMENTS_DONT_CONTAIN_RECEIVER);
  Label runtime;
  // Ensure that a RegExp stack is allocated.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(isolate());
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(isolate());
  __ Load(kScratchRegister, address_of_regexp_stack_memory_size);
  __ testp(kScratchRegister, kScratchRegister);
  __ j(zero, &runtime);

  // Check that the first argument is a JSRegExp object.
  __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
  __ JumpIfSmi(rax, &runtime);
  __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister);
  __ j(not_equal, &runtime);

  // Check that the RegExp has been compiled (data contains a fixed array).
  __ movp(rax, FieldOperand(rax, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    Condition is_smi = masm->CheckSmi(rax);
    __ Check(NegateCondition(is_smi),
             kUnexpectedTypeForRegExpDataFixedArrayExpected);
    __ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister);
    __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
  }

  // rax: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
  __ SmiToInteger32(rbx, FieldOperand(rax, JSRegExp::kDataTagOffset));
  __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP));
  __ j(not_equal, &runtime);

  // rax: RegExp data (FixedArray)
  // Check that the number of captures fits in the static offsets vector
  // buffer:
  //   check (number_of_captures + 1) * 2 <= offsets vector size,
  //   or     number_of_captures <= offsets vector size / 2 - 1.
  __ SmiToInteger32(rdx,
                    FieldOperand(rax, JSRegExp::kIrregexpCaptureCountOffset));
  STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
  __ cmpl(rdx, Immediate(Isolate::kJSRegexpStaticOffsetsVectorSize / 2 - 1));
  __ j(above, &runtime);
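
  // Worked example of the bound just checked: each capture group, plus the
  // match itself, needs two slots (start and end offset), i.e.
  // (number_of_captures + 1) * 2 ints. If kJSRegexpStaticOffsetsVectorSize
  // were, say, 50, any regexp with more than 24 capture groups would be
  // punted to the runtime here.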

  // Reset offset for possibly sliced string.
  __ Set(r14, 0);
  __ movp(rdi, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
  __ JumpIfSmi(rdi, &runtime);
  __ movp(r15, rdi);  // Make a copy of the original subject string.
  // rax: RegExp data (FixedArray)
  // rdi: subject string
  // r15: subject string
  // Handle subject string according to its encoding and representation
  // (the decision tree is sketched in pseudocode after the label
  // declarations below):
  // (1) Sequential two byte?  If yes, go to (9).
  // (2) Sequential one byte?  If yes, go to (5).
  // (3) Sequential or cons?  If not, go to (6).
  // (4) Cons string.  If the string is flat, replace subject with first string
  //     and go to (1). Otherwise bail out to runtime.
  // (5) One byte sequential.  Load regexp code for one byte.
  // (E) Carry on.
  /// [...]

  // Deferred code at the end of the stub:
  // (6) Long external string?  If not, go to (10).
  // (7) External string.  Make it, offset-wise, look like a sequential string.
  // (8) Is the external string one byte?  If yes, go to (5).
  // (9) Two byte sequential.  Load regexp code for two byte. Go to (E).
  // (10) Short external string or not a string?  If yes, bail out to runtime.
  // (11) Sliced string.  Replace subject with parent. Go to (1).

  Label seq_one_byte_string /* 5 */, seq_two_byte_string /* 9 */,
      external_string /* 7 */, check_underlying /* 1 */,
      not_seq_nor_cons /* 6 */, check_code /* E */, not_long_external /* 10 */;
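
  // For orientation, the dispatch implemented below in C-like pseudocode
  // (a sketch; the real tests are bit masks over the map's instance type):
  //
  //   retry:                                        // (1) check_underlying
  //     if (sequential two-byte)  goto two_byte;    //     -> (9)
  //     if (sequential one-byte)  goto one_byte;    // (2) -> (5)
  //     if (cons) {                                 // (3)/(4)
  //       if (second half not empty)  goto runtime;
  //       subject = first half;  goto retry;
  //     }
  //     if (long external)  { fix up data pointer; pick (5) or (9); }  // (6)-(8)
  //     else if (sliced)    { subject = parent;  goto retry; }         // (11)
  //     else                goto runtime;                              // (10)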
694
695 __ bind(&check_underlying);
696 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
697 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000698
699 // (1) Sequential two byte? If yes, go to (9).
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100700 __ andb(rbx, Immediate(kIsNotStringMask |
701 kStringRepresentationMask |
702 kStringEncodingMask |
703 kShortExternalStringMask));
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100704 STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000705 __ j(zero, &seq_two_byte_string); // Go to (9).
706
Ben Murdoch097c5b22016-05-18 11:27:45 +0100707 // (2) Sequential one byte? If yes, go to (5).
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000708 // Any other sequential string must be one byte.
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100709 __ andb(rbx, Immediate(kIsNotStringMask |
710 kStringRepresentationMask |
711 kShortExternalStringMask));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100712 __ j(zero, &seq_one_byte_string, Label::kNear); // Go to (5).
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100713
Ben Murdoch097c5b22016-05-18 11:27:45 +0100714 // (3) Sequential or cons? If not, go to (6).
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000715 // We check whether the subject string is a cons, since sequential strings
716 // have already been covered.
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000717 STATIC_ASSERT(kConsStringTag < kExternalStringTag);
718 STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100719 STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
720 STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000721 __ cmpp(rbx, Immediate(kExternalStringTag));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100722 __ j(greater_equal, &not_seq_nor_cons); // Go to (6).
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100723
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000724 // (4) Cons string. Check that it's flat.
725 // Replace subject with first string and reload instance type.
Steve Block44f0eee2011-05-26 01:26:41 +0100726 __ CompareRoot(FieldOperand(rdi, ConsString::kSecondOffset),
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000727 Heap::kempty_stringRootIndex);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100728 __ j(not_equal, &runtime);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000729 __ movp(rdi, FieldOperand(rdi, ConsString::kFirstOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100730 __ jmp(&check_underlying);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000731
Ben Murdoch097c5b22016-05-18 11:27:45 +0100732 // (5) One byte sequential. Load regexp code for one byte.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000733 __ bind(&seq_one_byte_string);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100734 // rax: RegExp data (FixedArray)
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000735 __ movp(r11, FieldOperand(rax, JSRegExp::kDataOneByteCodeOffset));
736 __ Set(rcx, 1); // Type is one byte.
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100737
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000738 // (E) Carry on. String handling is done.
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100739 __ bind(&check_code);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000740 // r11: irregexp code
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100741 // Check that the irregexp code has been generated for the actual string
742 // encoding. If it has, the field contains a code object otherwise it contains
Ben Murdoch257744e2011-11-30 15:57:28 +0000743 // smi (code flushing support)
744 __ JumpIfSmi(r11, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100745
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000746 // rdi: sequential subject string (or look-alike, external string)
747 // r15: original subject string
748 // rcx: encoding of subject string (1 if one_byte, 0 if two_byte);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100749 // r11: code
750 // Load used arguments before starting to push arguments for call to native
751 // RegExp code to avoid handling changing stack height.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000752 // We have to use r15 instead of rdi to load the length because rdi might
753 // have been only made to look like a sequential string when it actually
754 // is an external string.
755 __ movp(rbx, args.GetArgumentOperand(PREVIOUS_INDEX_ARGUMENT_INDEX));
756 __ JumpIfNotSmi(rbx, &runtime);
757 __ SmiCompare(rbx, FieldOperand(r15, String::kLengthOffset));
758 __ j(above_equal, &runtime);
759 __ SmiToInteger64(rbx, rbx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100760
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100761 // rdi: subject string
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100762 // rbx: previous index
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000763 // rcx: encoding of subject string (1 if one_byte 0 if two_byte);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100764 // r11: code
765 // All checks done. Now push arguments for native regexp code.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000766 Counters* counters = isolate()->counters();
Steve Block44f0eee2011-05-26 01:26:41 +0100767 __ IncrementCounter(counters->regexp_entry_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100768
Steve Block44f0eee2011-05-26 01:26:41 +0100769 // Isolates: note we add an additional parameter here (isolate pointer).
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000770 static const int kRegExpExecuteArguments = 9;
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100771 int argument_slots_on_stack =
772 masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
Steve Block44f0eee2011-05-26 01:26:41 +0100773 __ EnterApiExitFrame(argument_slots_on_stack);
774
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000775 // Argument 9: Pass current isolate address.
776 __ LoadAddress(kScratchRegister,
777 ExternalReference::isolate_address(isolate()));
778 __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kRegisterSize),
Steve Block44f0eee2011-05-26 01:26:41 +0100779 kScratchRegister);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100780
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000781 // Argument 8: Indicate that this is a direct call from JavaScript.
782 __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kRegisterSize),
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100783 Immediate(1));
784
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000785 // Argument 7: Start (high end) of backtracking stack memory area.
786 __ Move(kScratchRegister, address_of_regexp_stack_memory_address);
787 __ movp(r9, Operand(kScratchRegister, 0));
788 __ Move(kScratchRegister, address_of_regexp_stack_memory_size);
789 __ addp(r9, Operand(kScratchRegister, 0));
790 __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kRegisterSize), r9);
791
792 // Argument 6: Set the number of capture registers to zero to force global
793 // regexps to behave as non-global. This does not affect non-global regexps.
794 // Argument 6 is passed in r9 on Linux and on the stack on Windows.
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100795#ifdef _WIN64
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000796 __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kRegisterSize),
797 Immediate(0));
798#else
799 __ Set(r9, 0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100800#endif
801
802 // Argument 5: static offsets vector buffer.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000803 __ LoadAddress(
804 r8, ExternalReference::address_of_static_offsets_vector(isolate()));
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100805 // Argument 5 passed in r8 on Linux and on the stack on Windows.
806#ifdef _WIN64
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000807 __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kRegisterSize), r8);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100808#endif
809
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100810 // rdi: subject string
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100811 // rbx: previous index
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000812 // rcx: encoding of subject string (1 if one_byte 0 if two_byte);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100813 // r11: code
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000814 // r14: slice offset
815 // r15: original subject string
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100816
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100817 // Argument 2: Previous index.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000818 __ movp(arg_reg_2, rbx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100819
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000820 // Argument 4: End of string data
821 // Argument 3: Start of string data
822 Label setup_two_byte, setup_rest, got_length, length_not_from_slice;
823 // Prepare start and end index of the input.
824 // Load the length from the original sliced string if that is the case.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000825 __ addp(rbx, r14);
826 __ SmiToInteger32(arg_reg_3, FieldOperand(r15, String::kLengthOffset));
827 __ addp(r14, arg_reg_3); // Using arg3 as scratch.
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000828
829 // rbx: start index of the input
830 // r14: end index of the input
831 // r15: original subject string
832 __ testb(rcx, rcx); // Last use of rcx as encoding of subject string.
833 __ j(zero, &setup_two_byte, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000834 __ leap(arg_reg_4,
835 FieldOperand(rdi, r14, times_1, SeqOneByteString::kHeaderSize));
836 __ leap(arg_reg_3,
837 FieldOperand(rdi, rbx, times_1, SeqOneByteString::kHeaderSize));
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000838 __ jmp(&setup_rest, Label::kNear);
839 __ bind(&setup_two_byte);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000840 __ leap(arg_reg_4,
841 FieldOperand(rdi, r14, times_2, SeqTwoByteString::kHeaderSize));
842 __ leap(arg_reg_3,
843 FieldOperand(rdi, rbx, times_2, SeqTwoByteString::kHeaderSize));
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000844 __ bind(&setup_rest);
845
846 // Argument 1: Original subject string.
847 // The original subject is in the previous stack frame. Therefore we have to
848 // use rbp, which points exactly to one pointer size below the previous rsp.
849 // (Because creating a new stack frame pushes the previous rbp onto the stack
850 // and thereby moves up rsp by one kPointerSize.)
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000851 __ movp(arg_reg_1, r15);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100852
853 // Locate the code entry and call it.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000854 __ addp(r11, Immediate(Code::kHeaderSize - kHeapObjectTag));
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100855 __ call(r11);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100856
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000857 __ LeaveApiExitFrame(true);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100858
859 // Check the result.
Ben Murdoch257744e2011-11-30 15:57:28 +0000860 Label success;
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100861 Label exception;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000862 __ cmpl(rax, Immediate(1));
863 // We expect exactly one result since we force the called regexp to behave
864 // as non-global.
Ben Murdoch257744e2011-11-30 15:57:28 +0000865 __ j(equal, &success, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100866 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION));
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100867 __ j(equal, &exception);
868 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE));
869 // If none of the above, it can only be retry.
870 // Handle that in the runtime system.
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100871 __ j(not_equal, &runtime);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100872
873 // For failure return null.
874 __ LoadRoot(rax, Heap::kNullValueRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000875 __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100876
877 // Load RegExp data.
878 __ bind(&success);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000879 __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
880 __ movp(rcx, FieldOperand(rax, JSRegExp::kDataOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100881 __ SmiToInteger32(rax,
882 FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
883 // Calculate number of capture registers (number_of_captures + 1) * 2.
884 __ leal(rdx, Operand(rax, rax, times_1, 2));
885
886 // rdx: Number of capture registers
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000887 // Check that the fourth object is a JSArray object.
888 __ movp(r15, args.GetArgumentOperand(LAST_MATCH_INFO_ARGUMENT_INDEX));
889 __ JumpIfSmi(r15, &runtime);
890 __ CmpObjectType(r15, JS_ARRAY_TYPE, kScratchRegister);
891 __ j(not_equal, &runtime);
892 // Check that the JSArray is in fast case.
893 __ movp(rbx, FieldOperand(r15, JSArray::kElementsOffset));
894 __ movp(rax, FieldOperand(rbx, HeapObject::kMapOffset));
895 __ CompareRoot(rax, Heap::kFixedArrayMapRootIndex);
896 __ j(not_equal, &runtime);
897 // Check that the last match info has space for the capture registers and the
898 // additional information. Ensure no overflow in add.
899 STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset);
900 __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset));
901 __ subl(rax, Immediate(RegExpImpl::kLastMatchOverhead));
902 __ cmpl(rdx, rax);
903 __ j(greater, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100904
905 // rbx: last_match_info backing store (FixedArray)
906 // rdx: number of capture registers
907 // Store the capture count.
908 __ Integer32ToSmi(kScratchRegister, rdx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000909 __ movp(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset),
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100910 kScratchRegister);
911 // Store last subject and last input.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000912 __ movp(rax, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
913 __ movp(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax);
914 __ movp(rcx, rax);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100915 __ RecordWriteField(rbx,
916 RegExpImpl::kLastSubjectOffset,
917 rax,
918 rdi,
919 kDontSaveFPRegs);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000920 __ movp(rax, rcx);
921 __ movp(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100922 __ RecordWriteField(rbx,
923 RegExpImpl::kLastInputOffset,
924 rax,
925 rdi,
926 kDontSaveFPRegs);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100927
928 // Get the static offsets vector filled by the native regexp code.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000929 __ LoadAddress(
930 rcx, ExternalReference::address_of_static_offsets_vector(isolate()));
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100931
932 // rbx: last_match_info backing store (FixedArray)
933 // rcx: offsets vector
934 // rdx: number of capture registers
Ben Murdoch257744e2011-11-30 15:57:28 +0000935 Label next_capture, done;
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100936 // Capture register counter starts from number of capture registers and
937 // counts down until wraping after zero.
938 __ bind(&next_capture);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000939 __ subp(rdx, Immediate(1));
Ben Murdoch257744e2011-11-30 15:57:28 +0000940 __ j(negative, &done, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100941 // Read the value from the static offsets vector buffer and make it a smi.
942 __ movl(rdi, Operand(rcx, rdx, times_int_size, 0));
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100943 __ Integer32ToSmi(rdi, rdi);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100944 // Store the smi value in the last match info.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000945 __ movp(FieldOperand(rbx,
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100946 rdx,
947 times_pointer_size,
948 RegExpImpl::kFirstCaptureOffset),
949 rdi);
950 __ jmp(&next_capture);
951 __ bind(&done);
952
953 // Return last match info.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000954 __ movp(rax, r15);
955 __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100956
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100957 __ bind(&exception);
958 // Result must now be exception. If there is no pending exception already a
959 // stack overflow (on the backtrack stack) was detected in RegExp code but
960 // haven't created the exception yet. Handle that in the runtime system.
961 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
Steve Block44f0eee2011-05-26 01:26:41 +0100962 ExternalReference pending_exception_address(
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000963 Isolate::kPendingExceptionAddress, isolate());
Steve Block44f0eee2011-05-26 01:26:41 +0100964 Operand pending_exception_operand =
965 masm->ExternalOperand(pending_exception_address, rbx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000966 __ movp(rax, pending_exception_operand);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100967 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000968 __ cmpp(rax, rdx);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100969 __ j(equal, &runtime);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100970
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000971 // For exception, throw the exception again.
972 __ TailCallRuntime(Runtime::kRegExpExecReThrow);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100973
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000974 // Do the runtime call to execute the regexp.
975 __ bind(&runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000976 __ TailCallRuntime(Runtime::kRegExpExec);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000977
978 // Deferred code for string handling.
Ben Murdoch097c5b22016-05-18 11:27:45 +0100979 // (6) Long external string? If not, go to (10).
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000980 __ bind(&not_seq_nor_cons);
981 // Compare flags are still set from (3).
982 __ j(greater, &not_long_external, Label::kNear); // Go to (10).
983
Ben Murdoch097c5b22016-05-18 11:27:45 +0100984 // (7) External string. Short external strings have been ruled out.
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100985 __ bind(&external_string);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000986 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100987 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
988 if (FLAG_debug_code) {
989 // Assert that we do not have a cons or slice (indirect strings) here.
990 // Sequential strings have already been ruled out.
991 __ testb(rbx, Immediate(kIsIndirectStringMask));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000992 __ Assert(zero, kExternalStringExpectedButNotFound);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100993 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000994 __ movp(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100995 // Move the pointer so that offset-wise, it looks like a sequential string.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000996 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
997 __ subp(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100998 STATIC_ASSERT(kTwoByteStringTag == 0);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100999 // (8) Is the external string one byte? If yes, go to (5).
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001000 __ testb(rbx, Immediate(kStringEncodingMask));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001001 __ j(not_zero, &seq_one_byte_string); // Go to (5).
Ben Murdoch592a9fc2012-03-05 11:04:45 +00001002
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001003 // rdi: subject string (flat two-byte)
1004 // rax: RegExp data (FixedArray)
Ben Murdoch097c5b22016-05-18 11:27:45 +01001005 // (9) Two-byte sequential. Load the regexp code for two-byte. Go to (E).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001006 __ bind(&seq_two_byte_string);
1007 __ movp(r11, FieldOperand(rax, JSRegExp::kDataUC16CodeOffset));
1008 __ Set(rcx, 0); // Type is two byte.
1009 __ jmp(&check_code); // Go to (E).
1010
1011 // (10) Not a string or a short external string? If yes, bail out to runtime.
1012 __ bind(&not_long_external);
1013 // Catch non-string subject or short external string.
1014 STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
1015 __ testb(rbx, Immediate(kIsNotStringMask | kShortExternalStringMask));
1016 __ j(not_zero, &runtime);
1017
Ben Murdoch097c5b22016-05-18 11:27:45 +01001018 // (11) Sliced string. Replace subject with parent. Go to (1).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001019 // Load offset into r14 and replace subject string with parent.
1020 __ SmiToInteger32(r14, FieldOperand(rdi, SlicedString::kOffsetOffset));
1021 __ movp(rdi, FieldOperand(rdi, SlicedString::kParentOffset));
1022 __ jmp(&check_underlying);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001023#endif // V8_INTERPRETED_REGEXP
1024}
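// Annotation (added; not original source): a compact map of the
// subject-string dispatch above, using the step numbers the comments
// already carry:
//
//   (6)  long external string?        -> (7) unwrap kResourceDataOffset and
//                                        re-bias it to look sequential
//   (8)  one-byte encoding?           -> (5) reuse the one-byte code path
//   (9)  two-byte sequential          -> load the UC16 code object, go to (E)
//   (10) non-string or short external -> Runtime::kRegExpExec
//   (11) sliced string                -> replace subject with parent, go (1)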
1025
1026
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001027static int NegativeComparisonResult(Condition cc) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001028 DCHECK(cc != equal);
1029 DCHECK((cc == less) || (cc == less_equal)
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001030 || (cc == greater) || (cc == greater_equal));
1031 return (cc == greater || cc == greater_equal) ? LESS : GREATER;
1032}
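// Annotation (added): NegativeComparisonResult() returns the value that
// falsifies the caller's condition. Assuming the usual enum values
// LESS == -1, EQUAL == 0, GREATER == 1, the truth table is:
//
//   cc             returns  caller tests   outcome
//   less           GREATER  result <  0    false
//   less_equal     GREATER  result <= 0    false
//   greater        LESS     result >  0    false
//   greater_equal  LESS     result >= 0    false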
1033
1034
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001035static void CheckInputType(MacroAssembler* masm, Register input,
1036 CompareICState::State expected, Label* fail) {
1037 Label ok;
1038 if (expected == CompareICState::SMI) {
1039 __ JumpIfNotSmi(input, fail);
1040 } else if (expected == CompareICState::NUMBER) {
1041 __ JumpIfSmi(input, &ok);
1042 __ CompareMap(input, masm->isolate()->factory()->heap_number_map());
1043 __ j(not_equal, fail);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001044 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001045 // We could be strict about internalized/non-internalized here, but as long as
1046 // hydrogen doesn't care, the stub doesn't have to care either.
1047 __ bind(&ok);
1048}
1049
1050
1051static void BranchIfNotInternalizedString(MacroAssembler* masm,
1052 Label* label,
1053 Register object,
1054 Register scratch) {
1055 __ JumpIfSmi(object, label);
1056 __ movp(scratch, FieldOperand(object, HeapObject::kMapOffset));
1057 __ movzxbp(scratch,
1058 FieldOperand(scratch, Map::kInstanceTypeOffset));
1059 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
1060 __ testb(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
1061 __ j(not_zero, label);
1062}
1063
1064
1065void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001066 Label runtime_call, check_unequal_objects, done;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001067 Condition cc = GetCondition();
1068 Factory* factory = isolate()->factory();
1069
1070 Label miss;
1071 CheckInputType(masm, rdx, left(), &miss);
1072 CheckInputType(masm, rax, right(), &miss);
1073
1074 // Compare two smis.
1075 Label non_smi, smi_done;
1076 __ JumpIfNotBothSmi(rax, rdx, &non_smi);
1077 __ subp(rdx, rax);
1078 __ j(no_overflow, &smi_done);
1079 __ notp(rdx); // Correct sign in case of overflow. rdx cannot be 0 here.
1080 __ bind(&smi_done);
1081 __ movp(rax, rdx);
1082 __ ret(0);
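// Annotation (added): a minimal C sketch of the smi fast path above,
// assuming two's-complement arithmetic and GCC/Clang builtins (names are
// illustrative only):
//
//   int64_t diff;
//   if (__builtin_sub_overflow(left, right, &diff)) {
//     // Overflow flipped the sign; ~diff has the correct sign and is
//     // guaranteed non-zero, which mirrors the notp above.
//     diff = ~diff;
//   }
//   return diff;  // < 0, == 0 or > 0, like the value handed back in rax.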
1083 __ bind(&non_smi);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001084
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001085 // The compare stub returns a positive, negative, or zero 64-bit integer
1086 // value in rax, corresponding to result of comparing the two inputs.
1087 // NOTICE! This code is only reached after a smi-fast-case check, so
1088 // it is certain that at least one operand isn't a smi.
1089
1090 // Two identical objects are equal unless they are both NaN or undefined.
1091 {
Ben Murdoch257744e2011-11-30 15:57:28 +00001092 Label not_identical;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001093 __ cmpp(rax, rdx);
Ben Murdoch257744e2011-11-30 15:57:28 +00001094 __ j(not_equal, &not_identical, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001095
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001096 if (cc != equal) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001097 // Check for undefined. undefined OP undefined is false even though
1098 // undefined == undefined.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001099 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001100 Label check_for_nan;
1101 __ j(not_equal, &check_for_nan, Label::kNear);
1102 __ Set(rax, NegativeComparisonResult(cc));
1103 __ ret(0);
1104 __ bind(&check_for_nan);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001105 }
1106
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001107 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001108 // so we do the second best thing - test it ourselves.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001109 Label heap_number;
1110 // If it's not a heap number, then return equal for the (in)equality operator.
1111 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
1112 factory->heap_number_map());
1113 __ j(equal, &heap_number, Label::kNear);
1114 if (cc != equal) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001115 __ movp(rcx, FieldOperand(rax, HeapObject::kMapOffset));
1116 __ movzxbl(rcx, FieldOperand(rcx, Map::kInstanceTypeOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001117 // Call runtime on identical objects. Otherwise return equal.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001118 __ cmpb(rcx, Immediate(static_cast<uint8_t>(FIRST_JS_RECEIVER_TYPE)));
1119 __ j(above_equal, &runtime_call, Label::kFar);
1120 // Call runtime on identical symbols since we need to throw a TypeError.
1121 __ cmpb(rcx, Immediate(static_cast<uint8_t>(SYMBOL_TYPE)));
1122 __ j(equal, &runtime_call, Label::kFar);
1123 // Call runtime on identical SIMD values since we must throw a TypeError.
1124 __ cmpb(rcx, Immediate(static_cast<uint8_t>(SIMD128_VALUE_TYPE)));
1125 __ j(equal, &runtime_call, Label::kFar);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001126 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001127 __ Set(rax, EQUAL);
1128 __ ret(0);
1129
1130 __ bind(&heap_number);
1131 // It is a heap number, so return equal if it's not NaN.
1132 // For NaN, return 1 for every condition except greater and
1133 // greater-equal. Return -1 for them, so the comparison yields
1134 // false for all conditions except not-equal.
1135 __ Set(rax, EQUAL);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001136 __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
1137 __ Ucomisd(xmm0, xmm0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001138 __ setcc(parity_even, rax);
1139 // rax is 0 for equal non-NaN heapnumbers, 1 for NaNs.
1140 if (cc == greater_equal || cc == greater) {
1141 __ negp(rax);
1142 }
1143 __ ret(0);
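// Annotation (added): the Ucomisd/setcc pair above is the standard NaN
// self-compare trick; a hedged C equivalent would be:
//
//   double v = heap_number_value;
//   int result = (v != v) ? 1 : 0;   // ucomisd sets PF only for NaN.
//   if (cc == greater || cc == greater_equal) result = -result;
//   return result;  // NaN makes every condition except not_equal false.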
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001144
1145 __ bind(&not_identical);
1146 }
1147
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001148 if (cc == equal) { // Both strict and non-strict.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001149 Label slow; // Fallthrough label.
1150
1151 // If we're doing a strict equality comparison, we don't have to do
1152 // type conversion, so we generate code to do fast comparison for objects
1153 // and oddballs. Non-smi numbers and strings still go through the usual
1154 // slow-case code.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001155 if (strict()) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001156 // If either is a Smi (we know that not both are), then they can only
1157 // be equal if the other is a HeapNumber. If so, use the slow case.
1158 {
1159 Label not_smis;
1160 __ SelectNonSmi(rbx, rax, rdx, &not_smis);
1161
1162 // Check if the non-smi operand is a heap number.
1163 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
Ben Murdoch257744e2011-11-30 15:57:28 +00001164 factory->heap_number_map());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001165 // If heap number, handle it in the slow case.
1166 __ j(equal, &slow);
1167 // Return non-equal. ebx (the lower half of rbx) is not zero.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001168 __ movp(rax, rbx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001169 __ ret(0);
1170
1171 __ bind(&not_smis);
1172 }
1173
1174 // If either operand is a JSObject or an oddball value, then they are not
1175 // equal, since their pointers are different.
1176 // There is no test for undetectability in strict equality.
1177
1178 // If the first object is a JS object, we have done pointer comparison.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001179 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
Ben Murdoch257744e2011-11-30 15:57:28 +00001180 Label first_non_object;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001181 __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
Ben Murdoch257744e2011-11-30 15:57:28 +00001182 __ j(below, &first_non_object, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001183 // Return non-zero (eax, the lower half of rax, is not zero).
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001184 Label return_not_equal;
1185 STATIC_ASSERT(kHeapObjectTag != 0);
1186 __ bind(&return_not_equal);
1187 __ ret(0);
1188
1189 __ bind(&first_non_object);
1190 // Check for oddballs: true, false, null, undefined.
1191 __ CmpInstanceType(rcx, ODDBALL_TYPE);
1192 __ j(equal, &return_not_equal);
1193
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001194 __ CmpObjectType(rdx, FIRST_JS_RECEIVER_TYPE, rcx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001195 __ j(above_equal, &return_not_equal);
1196
1197 // Check for oddballs: true, false, null, undefined.
1198 __ CmpInstanceType(rcx, ODDBALL_TYPE);
1199 __ j(equal, &return_not_equal);
1200
1201 // Fall through to the general case.
1202 }
1203 __ bind(&slow);
1204 }
1205
1206 // Generate the number comparison code.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001207 Label non_number_comparison;
1208 Label unordered;
1209 FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison);
1210 __ xorl(rax, rax);
1211 __ xorl(rcx, rcx);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001212 __ Ucomisd(xmm0, xmm1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001213
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001214 // Don't base result on EFLAGS when a NaN is involved.
1215 __ j(parity_even, &unordered, Label::kNear);
1216 // Return a result of -1, 0, or 1, based on EFLAGS.
1217 __ setcc(above, rax);
1218 __ setcc(below, rcx);
1219 __ subp(rax, rcx);
1220 __ ret(0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001221
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001222 // If one of the numbers was NaN, then the result is always false.
1223 // The cc is never not-equal.
1224 __ bind(&unordered);
1225 DCHECK(cc != not_equal);
1226 if (cc == less || cc == less_equal) {
1227 __ Set(rax, 1);
1228 } else {
1229 __ Set(rax, -1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001230 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001231 __ ret(0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001232
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001233 // The number comparison code did not provide a valid result.
1234 __ bind(&non_number_comparison);
1235
1236 // Fast negative check for internalized-to-internalized equality.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001237 Label check_for_strings;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001238 if (cc == equal) {
1239 BranchIfNotInternalizedString(
1240 masm, &check_for_strings, rax, kScratchRegister);
1241 BranchIfNotInternalizedString(
1242 masm, &check_for_strings, rdx, kScratchRegister);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001243
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001244 // We've already checked for object identity, so if both operands are
1245 // internalized strings they aren't equal. Register eax (the lower half of
1246 // rax) already holds a non-zero value, which indicates not equal, so return.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001247 __ ret(0);
1248 }
1249
1250 __ bind(&check_for_strings);
1251
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001252 __ JumpIfNotBothSequentialOneByteStrings(rdx, rax, rcx, rbx,
1253 &check_unequal_objects);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001254
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001255 // Inline comparison of one-byte strings.
1256 if (cc == equal) {
1257 StringHelper::GenerateFlatOneByteStringEquals(masm, rdx, rax, rcx, rbx);
Ben Murdoch257744e2011-11-30 15:57:28 +00001258 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001259 StringHelper::GenerateCompareFlatOneByteStrings(masm, rdx, rax, rcx, rbx,
1260 rdi, r8);
Ben Murdoch257744e2011-11-30 15:57:28 +00001261 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001262
1263#ifdef DEBUG
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001264 __ Abort(kUnexpectedFallThroughFromStringComparison);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001265#endif
1266
1267 __ bind(&check_unequal_objects);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001268 if (cc == equal && !strict()) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001269 // Not strict equality. Objects are unequal if
1270 // they are both JSObjects and not undetectable,
1271 // and their pointers are different.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001272 Label return_unequal, undetectable;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001273 // At most one is a smi, so we can test for smi by adding the two.
1274 // A smi plus a heap object has the low bit set, a heap object plus
1275 // a heap object has the low bit clear.
1276 STATIC_ASSERT(kSmiTag == 0);
1277 STATIC_ASSERT(kSmiTagMask == 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001278 __ leap(rcx, Operand(rax, rdx, times_1, 0));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001279 __ testb(rcx, Immediate(kSmiTagMask));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001280 __ j(not_zero, &runtime_call, Label::kNear);
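// Annotation (added): why the leap/testb pair works, given kSmiTag == 0
// (asserted above), heap pointers tagged with a low bit of 1, and "at most
// one operand is a smi" (see the comment above):
//
//   smi          ...xxxx0        heap object   ...yyyy1
//   smi  + heap object -> low bit 1 (one smi present -> go to runtime)
//   heap + heap object -> 1 + 1 carries out of bit 0 -> low bit 0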
Ben Murdoch097c5b22016-05-18 11:27:45 +01001281
1282 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
1283 __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001284 __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
1285 Immediate(1 << Map::kIsUndetectable));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001286 __ j(not_zero, &undetectable);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001287 __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
1288 Immediate(1 << Map::kIsUndetectable));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001289 __ j(not_zero, &return_unequal);
1290
1291 __ CmpInstanceType(rbx, FIRST_JS_RECEIVER_TYPE);
1292 __ j(below, &runtime_call, Label::kNear);
1293 __ CmpInstanceType(rcx, FIRST_JS_RECEIVER_TYPE);
1294 __ j(below, &runtime_call, Label::kNear);
1295
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001296 __ bind(&return_unequal);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001297 // Return non-equal by returning the non-zero object pointer in rax.
1298 __ ret(0);
1299
1300 __ bind(&undetectable);
1301 __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
1302 Immediate(1 << Map::kIsUndetectable));
1303 __ j(zero, &return_unequal);
1304 __ Set(rax, EQUAL);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001305 __ ret(0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001306 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001307 __ bind(&runtime_call);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001308
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001309 if (cc == equal) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001310 {
1311 FrameScope scope(masm, StackFrame::INTERNAL);
1312 __ Push(rdx);
1313 __ Push(rax);
1314 __ CallRuntime(strict() ? Runtime::kStrictEqual : Runtime::kEqual);
1315 }
1316 // Turn true into 0 and false into some non-zero value.
1317 STATIC_ASSERT(EQUAL == 0);
1318 __ LoadRoot(rdx, Heap::kTrueValueRootIndex);
1319 __ subp(rax, rdx);
1320 __ Ret();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001321 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001322 // Push arguments below the return address to prepare the runtime tail call.
1323 __ PopReturnAddressTo(rcx);
1324 __ Push(rdx);
1325 __ Push(rax);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001326 __ Push(Smi::FromInt(NegativeComparisonResult(cc)));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001327 __ PushReturnAddressFrom(rcx);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001328 __ TailCallRuntime(Runtime::kCompare);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001329 }
1330
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001331 __ bind(&miss);
1332 GenerateMiss(masm);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001333}
1334
1335
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001336static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
1337 // rax : number of arguments to the construct function
1338 // rbx : feedback vector
1339 // rdx : slot in feedback vector (Smi)
1340 // rdi : the function to call
1341 FrameScope scope(masm, StackFrame::INTERNAL);
1342
1343 // Number-of-arguments register must be smi-tagged to call out.
1344 __ Integer32ToSmi(rax, rax);
1345 __ Push(rax);
1346 __ Push(rdi);
1347 __ Integer32ToSmi(rdx, rdx);
1348 __ Push(rdx);
1349 __ Push(rbx);
1350
1351 __ CallStub(stub);
1352
1353 __ Pop(rbx);
1354 __ Pop(rdx);
1355 __ Pop(rdi);
1356 __ Pop(rax);
1357 __ SmiToInteger32(rax, rax);
1358}
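// Annotation (added): the Integer32ToSmi/SmiToInteger32 pairs above exist
// because the pushed slots live inside an INTERNAL frame, whose stack the
// GC scans as tagged values; a raw int32 there could masquerade as a heap
// pointer. The invariant, in miniature:
//
//   __ Integer32ToSmi(rax, rax);  // now a valid smi, safe to spill
//   __ Push(rax);
//   // ... stub call, during which a GC may walk this frame ...
//   __ Pop(rax);
//   __ SmiToInteger32(rax, rax);  // back to the raw argument count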
1359
1360
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001361static void GenerateRecordCallTarget(MacroAssembler* masm) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001362 // Cache the called function in a feedback vector slot. Cache states
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001363 // are uninitialized, monomorphic (indicated by a JSFunction), and
1364 // megamorphic.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001365 // rax : number of arguments to the construct function
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001366 // rbx : feedback vector
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001367 // rdx : slot in feedback vector (Smi)
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001368 // rdi : the function to call
1369 Isolate* isolate = masm->isolate();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001370 Label initialize, done, miss, megamorphic, not_array_function,
1371 done_no_smi_convert;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001372
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001373 // Load the cache state into r11.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001374 __ SmiToInteger32(rdx, rdx);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001375 __ movp(r11,
1376 FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001377
1378 // A monomorphic cache hit or an already megamorphic state: invoke the
1379 // function without changing the state.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001380 // We don't know if r11 is a WeakCell or a Symbol, but it's harmless to read
1381 // at this position in a symbol (see static asserts in
1382 // type-feedback-vector.h).
1383 Label check_allocation_site;
1384 __ cmpp(rdi, FieldOperand(r11, WeakCell::kValueOffset));
1385 __ j(equal, &done, Label::kFar);
1386 __ CompareRoot(r11, Heap::kmegamorphic_symbolRootIndex);
1387 __ j(equal, &done, Label::kFar);
1388 __ CompareRoot(FieldOperand(r11, HeapObject::kMapOffset),
1389 Heap::kWeakCellMapRootIndex);
1390 __ j(not_equal, &check_allocation_site);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001391
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001392 // If the weak cell is cleared, we have a new chance to become monomorphic.
1393 __ CheckSmi(FieldOperand(r11, WeakCell::kValueOffset));
1394 __ j(equal, &initialize);
1395 __ jmp(&megamorphic);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001396
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001397 __ bind(&check_allocation_site);
1398 // If we came here, we need to see if we are the array function.
1399 // If we didn't have a matching function and we didn't find the megamorphic
1400 // sentinel, then the slot holds either some other function or an
1401 // AllocationSite.
1402 __ CompareRoot(FieldOperand(r11, 0), Heap::kAllocationSiteMapRootIndex);
1403 __ j(not_equal, &miss);
1404
1405 // Make sure the function is the Array() function.
1406 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r11);
1407 __ cmpp(rdi, r11);
1408 __ j(not_equal, &megamorphic);
1409 __ jmp(&done);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001410
1411 __ bind(&miss);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001412
1413 // A monomorphic miss (i.e., the cache is not uninitialized) goes
1414 // megamorphic.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001415 __ CompareRoot(r11, Heap::kuninitialized_symbolRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001416 __ j(equal, &initialize);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001417 // MegamorphicSentinel is an immortal immovable root object, so no
1418 // write-barrier is needed.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001419 __ bind(&megamorphic);
1420 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
1421 TypeFeedbackVector::MegamorphicSentinel(isolate));
1422 __ jmp(&done);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001423
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001424 // An uninitialized cache is patched with a WeakCell for the function, or an
1425 // AllocationSite (tracking the ElementsKind) if it is the Array constructor.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001426 __ bind(&initialize);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001427
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001428 // Make sure the function is the Array() function.
1429 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r11);
1430 __ cmpp(rdi, r11);
1431 __ j(not_equal, &not_array_function);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001432
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001433 CreateAllocationSiteStub create_stub(isolate);
1434 CallStubInRecordCallTarget(masm, &create_stub);
1435 __ jmp(&done_no_smi_convert);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001436
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001437 __ bind(&not_array_function);
1438 CreateWeakCellStub weak_cell_stub(isolate);
1439 CallStubInRecordCallTarget(masm, &weak_cell_stub);
1440 __ jmp(&done_no_smi_convert);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001441
1442 __ bind(&done);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001443 __ Integer32ToSmi(rdx, rdx);
1444
1445 __ bind(&done_no_smi_convert);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001446}
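// Annotation (added): the feedback-slot state machine implemented above,
// summarized:
//
//   uninitialized symbol --call---------> WeakCell(function)   (monomorphic)
//                        \--Array()-----> AllocationSite
//   WeakCell(cleared)    --call---------> re-initialized as above
//   any other mismatch   ---------------> megamorphic symbol   (terminal)
//
// Only the transition into the megamorphic sentinel skips the write barrier,
// which is safe because that sentinel is immortal and immovable.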
1447
1448
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001449void CallConstructStub::Generate(MacroAssembler* masm) {
1450 // rax : number of arguments
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001451 // rbx : feedback vector
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001452 // rdx : slot in feedback vector (Smi)
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001453 // rdi : constructor function
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001454
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001455 Label non_function;
1456 // Check that the constructor is not a smi.
1457 __ JumpIfSmi(rdi, &non_function);
1458 // Check that constructor is a JSFunction.
1459 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, r11);
1460 __ j(not_equal, &non_function);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001461
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001462 GenerateRecordCallTarget(masm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001463
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001464 __ SmiToInteger32(rdx, rdx);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001465 Label feedback_register_initialized;
1466 // Put the AllocationSite from the feedback vector into rbx, or undefined.
1467 __ movp(rbx,
1468 FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));
1469 __ CompareRoot(FieldOperand(rbx, 0), Heap::kAllocationSiteMapRootIndex);
1470 __ j(equal, &feedback_register_initialized, Label::kNear);
1471 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
1472 __ bind(&feedback_register_initialized);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001473
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001474 __ AssertUndefinedOrAllocationSite(rbx);
1475
1476 // Pass new target to construct stub.
1477 __ movp(rdx, rdi);
1478
1479 // Tail call to the function-specific construct stub (still in the caller
1480 // context at this point).
1481 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
1482 __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset));
1483 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
1484 __ jmp(rcx);
1485
1486 __ bind(&non_function);
1487 __ movp(rdx, rdi);
1488 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
1489}
1490
1491
1492void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
1493 // rdi - function
1494 // rdx - slot id
1495 // rbx - vector
1496 // rcx - allocation site (loaded from vector[slot]).
1497 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r8);
1498 __ cmpp(rdi, r8);
1499 __ j(not_equal, miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001500
1501 __ movp(rax, Immediate(arg_count()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001502
1503 // Increment the call count for monomorphic function calls.
1504 __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size,
1505 FixedArray::kHeaderSize + kPointerSize),
1506 Smi::FromInt(CallICNexus::kCallCountIncrement));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001507
1508 __ movp(rbx, rcx);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001509 __ movp(rdx, rdi);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001510 ArrayConstructorStub stub(masm->isolate(), arg_count());
1511 __ TailCallStub(&stub);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001512}
1513
1514
1515void CallICStub::Generate(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001516 // ----------- S t a t e -------------
1517 // -- rdi - function
1518 // -- rdx - slot id
1519 // -- rbx - vector
1520 // -----------------------------------
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001521 Isolate* isolate = masm->isolate();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001522 Label extra_checks_or_miss, call, call_function;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001523 int argc = arg_count();
1524 StackArgumentsAccessor args(rsp, argc);
1525 ParameterCount actual(argc);
1526
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001527 // The checks. First, does rdi match the recorded monomorphic target?
1528 __ SmiToInteger32(rdx, rdx);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001529 __ movp(rcx,
1530 FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));
1531
1532 // We don't know that we have a weak cell. We might have a private symbol
1533 // or an AllocationSite, but the memory is safe to examine.
1534 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
1535 // FixedArray.
1536 // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
1537 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
1538 // computed, meaning that it can't appear to be a pointer. If the low bit is
1539 // 0, then hash is computed, but the 0 bit prevents the field from appearing
1540 // to be a pointer.
1541 STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
1542 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
1543 WeakCell::kValueOffset &&
1544 WeakCell::kValueOffset == Symbol::kHashFieldSlot);
1545
1546 __ cmpp(rdi, FieldOperand(rcx, WeakCell::kValueOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001547 __ j(not_equal, &extra_checks_or_miss);
1548
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001549 // The compare above could have been a SMI/SMI comparison. Guard against this
1550 // convincing us that we have a monomorphic JSFunction.
1551 __ JumpIfSmi(rdi, &extra_checks_or_miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001552
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001553 // Increment the call count for monomorphic function calls.
1554 __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size,
1555 FixedArray::kHeaderSize + kPointerSize),
1556 Smi::FromInt(CallICNexus::kCallCountIncrement));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001557
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001558 __ bind(&call_function);
1559 __ Set(rax, argc);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001560 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
1561 tail_call_mode()),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001562 RelocInfo::CODE_TARGET);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001563
1564 __ bind(&extra_checks_or_miss);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001565 Label uninitialized, miss, not_allocation_site;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001566
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001567 __ Cmp(rcx, TypeFeedbackVector::MegamorphicSentinel(isolate));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001568 __ j(equal, &call);
1569
1570 // Check if we have an allocation site.
1571 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
1572 Heap::kAllocationSiteMapRootIndex);
1573 __ j(not_equal, &not_allocation_site);
1574
1575 // We have an allocation site.
1576 HandleArrayCase(masm, &miss);
1577
1578 __ bind(&not_allocation_site);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001579
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001580 // The following code attempts to handle MISS cases without going to the
1581 // runtime.
1582 if (FLAG_trace_ic) {
1583 __ jmp(&miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001584 }
1585
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001586 __ Cmp(rcx, TypeFeedbackVector::UninitializedSentinel(isolate));
1587 __ j(equal, &uninitialized);
1588
1589 // We are going megamorphic. If the feedback is a JSFunction, it is fine
1590 // to handle it here. More complex cases are dealt with in the runtime.
1591 __ AssertNotSmi(rcx);
1592 __ CmpObjectType(rcx, JS_FUNCTION_TYPE, rcx);
1593 __ j(not_equal, &miss);
1594 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
1595 TypeFeedbackVector::MegamorphicSentinel(isolate));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001596
1597 __ bind(&call);
1598 __ Set(rax, argc);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001599 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001600 RelocInfo::CODE_TARGET);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001601
1602 __ bind(&uninitialized);
1603
1604 // We are going monomorphic, provided we actually have a JSFunction.
1605 __ JumpIfSmi(rdi, &miss);
1606
1607 // Goto miss case if we do not have a function.
1608 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
1609 __ j(not_equal, &miss);
1610
1611 // Make sure the function is not the Array() function, which requires special
1612 // behavior on MISS.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001613 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, rcx);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001614 __ cmpp(rdi, rcx);
1615 __ j(equal, &miss);
1616
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001617 // Make sure the function belongs to the same native context.
1618 __ movp(rcx, FieldOperand(rdi, JSFunction::kContextOffset));
1619 __ movp(rcx, ContextOperand(rcx, Context::NATIVE_CONTEXT_INDEX));
1620 __ cmpp(rcx, NativeContextOperand());
1621 __ j(not_equal, &miss);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001622
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001623 // Initialize the call counter.
1624 __ Move(FieldOperand(rbx, rdx, times_pointer_size,
1625 FixedArray::kHeaderSize + kPointerSize),
1626 Smi::FromInt(CallICNexus::kCallCountIncrement));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001627
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001628 // Store the function. Use a stub since we need a frame for allocation.
1629 // rbx - vector
1630 // rdx - slot (needs to be in smi form)
1631 // rdi - function
1632 {
1633 FrameScope scope(masm, StackFrame::INTERNAL);
1634 CreateWeakCellStub create_stub(isolate);
1635
1636 __ Integer32ToSmi(rdx, rdx);
1637 __ Push(rdi);
1638 __ CallStub(&create_stub);
1639 __ Pop(rdi);
1640 }
1641
1642 __ jmp(&call_function);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001643
1644 // We are here because tracing is on or we encountered a MISS case we can't
1645 // handle here.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001646 __ bind(&miss);
1647 GenerateMiss(masm);
1648
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001649 __ jmp(&call);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001650
1651 // Unreachable
1652 __ int3();
1653}
1654
1655
1656void CallICStub::GenerateMiss(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001657 FrameScope scope(masm, StackFrame::INTERNAL);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001658
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001659 // Push the function and the feedback info (vector and slot).
1660 __ Push(rdi);
1661 __ Push(rbx);
1662 __ Integer32ToSmi(rdx, rdx);
1663 __ Push(rdx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001664
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001665 // Call the entry.
1666 __ CallRuntime(Runtime::kCallIC_Miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001667
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001668 // Move the result to rdi and exit the internal frame.
1669 __ movp(rdi, rax);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001670}
1671
1672
Steve Block44f0eee2011-05-26 01:26:41 +01001673bool CEntryStub::NeedsImmovableCode() {
1674 return false;
1675}
1676
1677
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001678void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
1679 CEntryStub::GenerateAheadOfTime(isolate);
1680 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
1681 StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001682 // It is important that the store buffer overflow stubs are generated first.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001683 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
1684 CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001685 CreateWeakCellStub::GenerateAheadOfTime(isolate);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001686 BinaryOpICStub::GenerateAheadOfTime(isolate);
1687 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001688 StoreFastElementStub::GenerateAheadOfTime(isolate);
1689 TypeofStub::GenerateAheadOfTime(isolate);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001690}
1691
1692
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001693void CodeStub::GenerateFPStubs(Isolate* isolate) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001694}
1695
1696
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001697void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
1698 CEntryStub stub(isolate, 1, kDontSaveFPRegs);
1699 stub.GetCode();
1700 CEntryStub save_doubles(isolate, 1, kSaveFPRegs);
1701 save_doubles.GetCode();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001702}
1703
1704
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001705void CEntryStub::Generate(MacroAssembler* masm) {
1706 // rax: number of arguments including receiver
1707 // rbx: pointer to C function (C callee-saved)
1708 // rbp: frame pointer of calling JS frame (restored after C call)
1709 // rsp: stack pointer (restored after C call)
1710 // rsi: current context (restored)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001711 //
1712 // If argv_in_register():
1713 // r15: pointer to the first argument
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001714
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001715 ProfileEntryHookStub::MaybeCallEntryHook(masm);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001716
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001717#ifdef _WIN64
Ben Murdoch097c5b22016-05-18 11:27:45 +01001718 // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9. It requires the
1719 // stack to be aligned to 16 bytes. It only allows a single word to be
1720 // returned in register rax. Larger return sizes must be written to an address
1721 // passed as a hidden first argument.
1722 const Register kCCallArg0 = rcx;
1723 const Register kCCallArg1 = rdx;
1724 const Register kCCallArg2 = r8;
1725 const Register kCCallArg3 = r9;
1726 const int kArgExtraStackSpace = 2;
1727 const int kMaxRegisterResultSize = 1;
1728#else
1729 // GCC / Clang pass arguments in rdi, rsi, rdx, rcx, r8, r9. Simple results
1730 // are returned in rax, and a struct of two pointers is returned in rax:rdx.
1731 // Larger return sizes must be written to an address passed as a hidden first
1732 // argument.
1733 const Register kCCallArg0 = rdi;
1734 const Register kCCallArg1 = rsi;
1735 const Register kCCallArg2 = rdx;
1736 const Register kCCallArg3 = rcx;
1737 const int kArgExtraStackSpace = 0;
1738 const int kMaxRegisterResultSize = 2;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001739#endif // _WIN64
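// Annotation (added): an illustrative mapping for a C call such as
//
//   void* builtin(int argc, void** argv, void* isolate);
//
// argc, argv and isolate land in rcx, rdx, r8 on Win64 and in rdi, rsi, rdx
// on System V, which is exactly what the kCCallArg* aliases abstract over.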
Ben Murdoch097c5b22016-05-18 11:27:45 +01001740
1741 // Enter the exit frame that transitions from JavaScript to C++.
1742 int arg_stack_space =
1743 kArgExtraStackSpace +
1744 (result_size() <= kMaxRegisterResultSize ? 0 : result_size());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001745 if (argv_in_register()) {
1746 DCHECK(!save_doubles());
1747 __ EnterApiExitFrame(arg_stack_space);
1748 // Move argc into r14 (argv is already in r15).
1749 __ movp(r14, rax);
1750 } else {
1751 __ EnterExitFrame(arg_stack_space, save_doubles());
1752 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001753
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001754 // rbx: pointer to builtin function (C callee-saved).
1755 // rbp: frame pointer of exit frame (restored after C call).
1756 // rsp: stack pointer (restored after C call).
1757 // r14: number of arguments including receiver (C callee-saved).
Steve Block44f0eee2011-05-26 01:26:41 +01001758 // r15: argv pointer (C callee-saved).
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001759
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001760 // Check stack alignment.
1761 if (FLAG_debug_code) {
1762 __ CheckStackAlignment();
1763 }
1764
Ben Murdoch097c5b22016-05-18 11:27:45 +01001765 // Call the C function. The Arguments object is created by stubs declared by
1766 // DECLARE_RUNTIME_FUNCTION().
1767 if (result_size() <= kMaxRegisterResultSize) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001768 // Pass a pointer to the Arguments object as the first argument.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001769 // Return result in single register (rax), or a register pair (rax, rdx).
1770 __ movp(kCCallArg0, r14); // argc.
1771 __ movp(kCCallArg1, r15); // argv.
1772 __ Move(kCCallArg2, ExternalReference::isolate_address(isolate()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001773 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001774 DCHECK_LE(result_size(), 3);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001775 // Pass a pointer to the result location as the first argument.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001776 __ leap(kCCallArg0, StackSpaceOperand(kArgExtraStackSpace));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001777 // Pass a pointer to the Arguments object as the second argument.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001778 __ movp(kCCallArg1, r14); // argc.
1779 __ movp(kCCallArg2, r15); // argv.
1780 __ Move(kCCallArg3, ExternalReference::isolate_address(isolate()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001781 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001782 __ call(rbx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001783
Ben Murdoch097c5b22016-05-18 11:27:45 +01001784 if (result_size() > kMaxRegisterResultSize) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001785 // Read result values stored on stack. Result is stored
Ben Murdoch097c5b22016-05-18 11:27:45 +01001786 // above the two Arguments object slots on Win64.
1787 DCHECK_LE(result_size(), 3);
1788 __ movq(kReturnRegister0, StackSpaceOperand(kArgExtraStackSpace + 0));
1789 __ movq(kReturnRegister1, StackSpaceOperand(kArgExtraStackSpace + 1));
1790 if (result_size() > 2) {
1791 __ movq(kReturnRegister2, StackSpaceOperand(kArgExtraStackSpace + 2));
1792 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001793 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01001794 // Result is in rax, rdx:rax or r8:rdx:rax - do not destroy these registers!
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001795
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001796 // Check result for exception sentinel.
1797 Label exception_returned;
1798 __ CompareRoot(rax, Heap::kExceptionRootIndex);
1799 __ j(equal, &exception_returned);
1800
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001801 // Check that there is no pending exception, otherwise we
1802 // should have returned the exception sentinel.
1803 if (FLAG_debug_code) {
1804 Label okay;
1805 __ LoadRoot(r14, Heap::kTheHoleValueRootIndex);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001806 ExternalReference pending_exception_address(
1807 Isolate::kPendingExceptionAddress, isolate());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001808 Operand pending_exception_operand =
1809 masm->ExternalOperand(pending_exception_address);
1810 __ cmpp(r14, pending_exception_operand);
1811 __ j(equal, &okay, Label::kNear);
1812 __ int3();
1813 __ bind(&okay);
1814 }
1815
1816 // Exit the JavaScript to C++ exit frame.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001817 __ LeaveExitFrame(save_doubles(), !argv_in_register());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001818 __ ret(0);
1819
1820 // Handling of exception.
1821 __ bind(&exception_returned);
1822
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001823 ExternalReference pending_handler_context_address(
1824 Isolate::kPendingHandlerContextAddress, isolate());
1825 ExternalReference pending_handler_code_address(
1826 Isolate::kPendingHandlerCodeAddress, isolate());
1827 ExternalReference pending_handler_offset_address(
1828 Isolate::kPendingHandlerOffsetAddress, isolate());
1829 ExternalReference pending_handler_fp_address(
1830 Isolate::kPendingHandlerFPAddress, isolate());
1831 ExternalReference pending_handler_sp_address(
1832 Isolate::kPendingHandlerSPAddress, isolate());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001833
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001834 // Ask the runtime for help to determine the handler. This will set rax to
1835 // contain the current pending exception, don't clobber it.
1836 ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler,
1837 isolate());
1838 {
1839 FrameScope scope(masm, StackFrame::MANUAL);
1840 __ movp(arg_reg_1, Immediate(0)); // argc.
1841 __ movp(arg_reg_2, Immediate(0)); // argv.
1842 __ Move(arg_reg_3, ExternalReference::isolate_address(isolate()));
1843 __ PrepareCallCFunction(3);
1844 __ CallCFunction(find_handler, 3);
1845 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001846
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001847 // Retrieve the handler context, SP and FP.
1848 __ movp(rsi, masm->ExternalOperand(pending_handler_context_address));
1849 __ movp(rsp, masm->ExternalOperand(pending_handler_sp_address));
1850 __ movp(rbp, masm->ExternalOperand(pending_handler_fp_address));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001851
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001852 // If the handler is a JS frame, restore the context to the frame. Note that
1853 // rsi will be zero when the handler is not a JS frame.
1854 Label skip;
1855 __ testp(rsi, rsi);
1856 __ j(zero, &skip, Label::kNear);
1857 __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
1858 __ bind(&skip);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001859
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001860 // Compute the handler entry address and jump to it.
1861 __ movp(rdi, masm->ExternalOperand(pending_handler_code_address));
1862 __ movp(rdx, masm->ExternalOperand(pending_handler_offset_address));
1863 __ leap(rdi, FieldOperand(rdi, rdx, times_1, Code::kHeaderSize));
1864 __ jmp(rdi);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001865}
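// Annotation (added): the exception path above, compressed into pseudo code
// (the slot names are the pending_handler_* external references):
//
//   rax = pending exception;           // set by the unwinder, not clobbered
//   UnwindAndFindExceptionHandler();   // fills the pending_handler_* slots
//   rsp = *pending_handler_sp;
//   rbp = *pending_handler_fp;
//   if (rsi != 0) frame.context = rsi; // only JS handler frames have one
//   jmp *pending_handler_code + Code::kHeaderSize + *pending_handler_offset;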
1866
1867
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001868void JSEntryStub::Generate(MacroAssembler* masm) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001869 Label invoke, handler_entry, exit;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001870 Label not_outermost_js, not_outermost_js_2;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001871
1872 ProfileEntryHookStub::MaybeCallEntryHook(masm);
1873
Steve Block44f0eee2011-05-26 01:26:41 +01001874 { // NOLINT. Scope block confuses linter.
1875 MacroAssembler::NoRootArrayScope uninitialized_root_register(masm);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001876 // Set up frame.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001877 __ pushq(rbp);
1878 __ movp(rbp, rsp);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001879
Steve Block44f0eee2011-05-26 01:26:41 +01001880 // Push the stack frame type marker twice.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001881 int marker = type();
Steve Block44f0eee2011-05-26 01:26:41 +01001882 // Scratch register is neither callee-save, nor an argument register on any
1883 // platform. It's free to use at this point.
1884 // Cannot use smi-register for loading yet.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001885 __ Move(kScratchRegister, Smi::FromInt(marker), Assembler::RelocInfoNone());
1886 __ Push(kScratchRegister); // context slot
1887 __ Push(kScratchRegister); // function slot
1888 // Save callee-saved registers (X64/X32/Win64 calling conventions).
1889 __ pushq(r12);
1890 __ pushq(r13);
1891 __ pushq(r14);
1892 __ pushq(r15);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001893#ifdef _WIN64
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001894 __ pushq(rdi); // Callee-saved only in Win64 ABI, an argument in AMD64 ABI.
1895 __ pushq(rsi); // Callee-saved only in Win64 ABI, an argument in AMD64 ABI.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001896#endif
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001897 __ pushq(rbx);
1898
1899#ifdef _WIN64
1900 // On Win64, XMM6-XMM15 are callee-saved.
1901 __ subp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
1902 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0), xmm6);
1903 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1), xmm7);
1904 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2), xmm8);
1905 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3), xmm9);
1906 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4), xmm10);
1907 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5), xmm11);
1908 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6), xmm12);
1909 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7), xmm13);
1910 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14);
1911 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15);
1912#endif
Steve Block44f0eee2011-05-26 01:26:41 +01001913
1914 // Set up the roots and smi constant registers.
1915 // Needs to be done before any further smi loads.
Steve Block44f0eee2011-05-26 01:26:41 +01001916 __ InitializeRootRegister();
1917 }
1918
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001919 // Save copies of the top frame descriptor on the stack.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001920 ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
Steve Block44f0eee2011-05-26 01:26:41 +01001921 {
1922 Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001923 __ Push(c_entry_fp_operand);
Steve Block44f0eee2011-05-26 01:26:41 +01001924 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001925
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001926 // If this is the outermost JS call, set js_entry_sp value.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001927 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
Steve Block44f0eee2011-05-26 01:26:41 +01001928 __ Load(rax, js_entry_sp);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001929 __ testp(rax, rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001930 __ j(not_zero, &not_outermost_js);
Steve Block053d10c2011-06-13 19:13:29 +01001931 __ Push(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001932 __ movp(rax, rbp);
Steve Block44f0eee2011-05-26 01:26:41 +01001933 __ Store(js_entry_sp, rax);
Steve Block053d10c2011-06-13 19:13:29 +01001934 Label cont;
1935 __ jmp(&cont);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001936 __ bind(&not_outermost_js);
Steve Block053d10c2011-06-13 19:13:29 +01001937 __ Push(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME));
1938 __ bind(&cont);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001939
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001940 // Jump to a faked try block that does the invoke, with a faked catch
1941 // block that sets the pending exception.
1942 __ jmp(&invoke);
1943 __ bind(&handler_entry);
1944 handler_offset_ = handler_entry.pos();
1945 // Caught exception: Store result (exception) in the pending exception
1946 // field in the JSEnv and return a failure sentinel.
Ben Murdoch589d6972011-11-30 16:04:58 +00001947 ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001948 isolate());
Steve Block44f0eee2011-05-26 01:26:41 +01001949 __ Store(pending_exception, rax);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001950 __ LoadRoot(rax, Heap::kExceptionRootIndex);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001951 __ jmp(&exit);
1952
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001953 // Invoke: Link this frame into the handler chain.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001954 __ bind(&invoke);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001955 __ PushStackHandler();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001956
1957 // Clear any pending exceptions.
Steve Block44f0eee2011-05-26 01:26:41 +01001958 __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
1959 __ Store(pending_exception, rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001960
1961 // Fake a receiver (NULL).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001962 __ Push(Immediate(0)); // receiver
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001963
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001964 // Invoke the function by calling through the JS entry trampoline builtin and
1965 // pop the faked function when we return. We load the address from an
1966 // external reference instead of inlining the call target address directly
1967 // in the code, because the builtin stubs may not have been generated yet
1968 // at the time this code is generated.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001969 if (type() == StackFrame::ENTRY_CONSTRUCT) {
Steve Block44f0eee2011-05-26 01:26:41 +01001970 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001971 isolate());
Steve Block44f0eee2011-05-26 01:26:41 +01001972 __ Load(rax, construct_entry);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001973 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001974 ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
Steve Block44f0eee2011-05-26 01:26:41 +01001975 __ Load(rax, entry);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001976 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001977 __ leap(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001978 __ call(kScratchRegister);
1979
1980 // Unlink this frame from the handler chain.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001981 __ PopStackHandler();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001982
Steve Block053d10c2011-06-13 19:13:29 +01001983 __ bind(&exit);
Steve Block053d10c2011-06-13 19:13:29 +01001984 // Check if the current stack frame is marked as the outermost JS frame.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001985 __ Pop(rbx);
Steve Block053d10c2011-06-13 19:13:29 +01001986 __ Cmp(rbx, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001987 __ j(not_equal, &not_outermost_js_2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001988 __ Move(kScratchRegister, js_entry_sp);
1989 __ movp(Operand(kScratchRegister, 0), Immediate(0));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001990 __ bind(&not_outermost_js_2);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001991
1992 // Restore the top frame descriptor from the stack.
Steve Block053d10c2011-06-13 19:13:29 +01001993 { Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001994 __ Pop(c_entry_fp_operand);
Steve Block44f0eee2011-05-26 01:26:41 +01001995 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001996
1997 // Restore callee-saved registers (X64 conventions).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001998#ifdef _WIN64
1999 // On Win64, XMM6-XMM15 are callee-saved.
2000 __ movdqu(xmm6, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0));
2001 __ movdqu(xmm7, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1));
2002 __ movdqu(xmm8, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2));
2003 __ movdqu(xmm9, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3));
2004 __ movdqu(xmm10, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4));
2005 __ movdqu(xmm11, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5));
2006 __ movdqu(xmm12, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6));
2007 __ movdqu(xmm13, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7));
2008 __ movdqu(xmm14, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8));
2009 __ movdqu(xmm15, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9));
2010 __ addp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
2011#endif
2012
2013 __ popq(rbx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002014#ifdef _WIN64
2015 // Callee-saved in the Win64 ABI, argument/volatile registers in AMD64 ABI.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002016 __ popq(rsi);
2017 __ popq(rdi);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002018#endif
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002019 __ popq(r15);
2020 __ popq(r14);
2021 __ popq(r13);
2022 __ popq(r12);
2023 __ addp(rsp, Immediate(2 * kPointerSize)); // remove markers
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002024
2025 // Restore frame pointer and return.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002026 __ popq(rbp);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002027 __ ret(0);
2028}
2029
2030
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002031void InstanceOfStub::Generate(MacroAssembler* masm) {
2032 Register const object = rdx; // Object (lhs).
2033 Register const function = rax; // Function (rhs).
2034 Register const object_map = rcx; // Map of {object}.
2035 Register const function_map = r8; // Map of {function}.
2036 Register const function_prototype = rdi; // Prototype of {function}.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002037
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002038 DCHECK(object.is(InstanceOfDescriptor::LeftRegister()));
2039 DCHECK(function.is(InstanceOfDescriptor::RightRegister()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002040
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002041 // Check if {object} is a smi.
2042 Label object_is_smi;
2043 __ JumpIfSmi(object, &object_is_smi, Label::kNear);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002044
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002045 // Lookup the {function} and the {object} map in the global instanceof cache.
2046 // Note: This is safe because we clear the global instanceof cache whenever
2047 // we change the prototype of any object.
2048 Label fast_case, slow_case;
2049 __ movp(object_map, FieldOperand(object, HeapObject::kMapOffset));
2050 __ CompareRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
2051 __ j(not_equal, &fast_case, Label::kNear);
2052 __ CompareRoot(object_map, Heap::kInstanceofCacheMapRootIndex);
2053 __ j(not_equal, &fast_case, Label::kNear);
2054 __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
2055 __ ret(0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002056
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002057 // If {object} is a smi we can safely return false if {function} is a JS
2058 // function, otherwise we have to miss to the runtime and throw an exception.
2059 __ bind(&object_is_smi);
2060 __ JumpIfSmi(function, &slow_case);
2061 __ CmpObjectType(function, JS_FUNCTION_TYPE, function_map);
2062 __ j(not_equal, &slow_case);
2063 __ LoadRoot(rax, Heap::kFalseValueRootIndex);
2064 __ ret(0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002065
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002066 // Fast-case: The {function} must be a valid JSFunction.
2067 __ bind(&fast_case);
2068 __ JumpIfSmi(function, &slow_case);
2069 __ CmpObjectType(function, JS_FUNCTION_TYPE, function_map);
2070 __ j(not_equal, &slow_case);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002071
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002072 // Ensure that {function} has an instance prototype.
2073 __ testb(FieldOperand(function_map, Map::kBitFieldOffset),
2074 Immediate(1 << Map::kHasNonInstancePrototype));
2075 __ j(not_zero, &slow_case);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002076
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002077 // Get the "prototype" (or initial map) of the {function}.
2078 __ movp(function_prototype,
2079 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2080 __ AssertNotSmi(function_prototype);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002081
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002082 // Resolve the prototype if the {function} has an initial map. Afterwards the
2083 // {function_prototype} will be either the JSReceiver prototype object or the
2084 // hole value, which means that no instances of the {function} were created so
2085 // far and hence we should return false.
2086 Label function_prototype_valid;
2087 Register const function_prototype_map = kScratchRegister;
2088 __ CmpObjectType(function_prototype, MAP_TYPE, function_prototype_map);
2089 __ j(not_equal, &function_prototype_valid, Label::kNear);
2090 __ movp(function_prototype,
2091 FieldOperand(function_prototype, Map::kPrototypeOffset));
2092 __ bind(&function_prototype_valid);
2093 __ AssertNotSmi(function_prototype);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002094
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002095 // Update the global instanceof cache with the current {object} map and
2096 // {function}. The cached answer will be set when it is known below.
2097 __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
2098 __ StoreRoot(object_map, Heap::kInstanceofCacheMapRootIndex);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002099
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002100 // Loop through the prototype chain looking for the {function} prototype.
2101 // Assume true, and change to false if not found.
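  // In effect, the walk below implements the following JS-level algorithm
  // (a rough sketch; the proxy and access-check cases are diverted to the
  // fallback paths instead):
  //
  //   let p = object;
  //   for (;;) {
  //     p = Object.getPrototypeOf(p);
  //     if (p === function.prototype) return true;
  //     if (p === null) return false;
  //   }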
  Label done, loop, fast_runtime_fallback;
  __ LoadRoot(rax, Heap::kTrueValueRootIndex);
  __ bind(&loop);

  __ testb(FieldOperand(object_map, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsAccessCheckNeeded));
  __ j(not_zero, &fast_runtime_fallback, Label::kNear);
  __ CmpInstanceType(object_map, JS_PROXY_TYPE);
  __ j(equal, &fast_runtime_fallback, Label::kNear);

  __ movp(object, FieldOperand(object_map, Map::kPrototypeOffset));
  __ cmpp(object, function_prototype);
  __ j(equal, &done, Label::kNear);
  __ CompareRoot(object, Heap::kNullValueRootIndex);
  __ movp(object_map, FieldOperand(object, HeapObject::kMapOffset));
  __ j(not_equal, &loop);
  __ LoadRoot(rax, Heap::kFalseValueRootIndex);
  __ bind(&done);
  __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
  __ ret(0);

  // Found Proxy or access check needed: Call the runtime.
  __ bind(&fast_runtime_fallback);
  __ PopReturnAddressTo(kScratchRegister);
  __ Push(object);
  __ Push(function_prototype);
  __ PushReturnAddressFrom(kScratchRegister);
  // Invalidate the instanceof cache.
  __ Move(rax, Smi::FromInt(0));
  __ StoreRoot(rax, Heap::kInstanceofCacheFunctionRootIndex);
  __ TailCallRuntime(Runtime::kHasInPrototypeChain);

  // Slow-case: Call the %InstanceOf runtime function.
  __ bind(&slow_case);
  __ PopReturnAddressTo(kScratchRegister);
  __ Push(object);
  __ Push(function);
  __ PushReturnAddressFrom(kScratchRegister);
  __ TailCallRuntime(Runtime::kInstanceOf);
}


// -------------------------------------------------------------------------
// StringCharCodeAtGenerator

void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
  // If the receiver is a smi, trigger the non-string case.
  if (check_mode_ == RECEIVER_IS_UNKNOWN) {
    __ JumpIfSmi(object_, receiver_not_string_);

    // Fetch the instance type of the receiver into the result register.
    __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset));
    __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
    // If the receiver is not a string, trigger the non-string case.
    __ testb(result_, Immediate(kIsNotStringMask));
    __ j(not_zero, receiver_not_string_);
  }

  // If the index is not a smi, trigger the non-smi case.
  __ JumpIfNotSmi(index_, &index_not_smi_);
  __ bind(&got_smi_index_);

  // Check for index out of range.
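  // (Both operands are smis here, so the comparison can be done on the
  // tagged values directly; the unsigned above_equal check also rejects
  // negative indices, which show up as large unsigned values.)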
  __ SmiCompare(index_, FieldOperand(object_, String::kLengthOffset));
  __ j(above_equal, index_out_of_range_);

  __ SmiToInteger32(index_, index_);

  StringCharLoadGenerator::Generate(
      masm, object_, index_, result_, &call_runtime_);

  __ Integer32ToSmi(result_, result_);
  __ bind(&exit_);
}


void StringCharCodeAtGenerator::GenerateSlow(
    MacroAssembler* masm, EmbedMode embed_mode,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);

  Factory* factory = masm->isolate()->factory();
  // Index is not a smi.
  __ bind(&index_not_smi_);
  // If index is a heap number, try converting it to an integer.
  __ CheckMap(index_,
              factory->heap_number_map(),
              index_not_number_,
              DONT_DO_SMI_CHECK);
  call_helper.BeforeCall(masm);
  if (embed_mode == PART_OF_IC_HANDLER) {
    __ Push(LoadWithVectorDescriptor::VectorRegister());
    __ Push(LoadDescriptor::SlotRegister());
  }
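  // (When embedded in an IC handler, the feedback vector and slot registers
  // are still needed by the caller and would be clobbered by the runtime
  // call below, so they are spilled around it.)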
  __ Push(object_);
  __ Push(index_);  // Consumed by runtime conversion function.
  if (index_flags_ == STRING_INDEX_IS_NUMBER) {
    __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero);
  } else {
    DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
    // NumberToSmi discards numbers that are not exact integers.
    __ CallRuntime(Runtime::kNumberToSmi);
  }
  if (!index_.is(rax)) {
    // Save the conversion result before the pop instructions below
    // have a chance to overwrite it.
    __ movp(index_, rax);
  }
  __ Pop(object_);
  if (embed_mode == PART_OF_IC_HANDLER) {
    __ Pop(LoadDescriptor::SlotRegister());
    __ Pop(LoadWithVectorDescriptor::VectorRegister());
  }
  // Reload the instance type.
  __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset));
  __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
  call_helper.AfterCall(masm);
  // If index is still not a smi, it must be out of range.
  __ JumpIfNotSmi(index_, index_out_of_range_);
  // Otherwise, return to the fast path.
  __ jmp(&got_smi_index_);

  // Call runtime. We get here when the receiver is a string and the
  // index is a number, but the code for getting the actual character
  // is too complex (e.g., when the string needs to be flattened).
  __ bind(&call_runtime_);
  call_helper.BeforeCall(masm);
  __ Push(object_);
  __ Integer32ToSmi(index_, index_);
  __ Push(index_);
  __ CallRuntime(Runtime::kStringCharCodeAtRT);
  if (!result_.is(rax)) {
    __ movp(result_, rax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
}


// -------------------------------------------------------------------------
// StringCharFromCodeGenerator

void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
  // Fast case of Heap::LookupSingleCharacterStringFromCode.
  __ JumpIfNotSmi(code_, &slow_case_);
  __ SmiCompare(code_, Smi::FromInt(String::kMaxOneByteCharCode));
  __ j(above, &slow_case_);

  __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
  SmiIndex index = masm->SmiToIndex(kScratchRegister, code_, kPointerSizeLog2);
  __ movp(result_, FieldOperand(result_, index.reg, index.scale,
                                FixedArray::kHeaderSize));
  __ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
  __ j(equal, &slow_case_);
  __ bind(&exit_);
}


void StringCharFromCodeGenerator::GenerateSlow(
    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);

  __ bind(&slow_case_);
  call_helper.BeforeCall(masm);
  __ Push(code_);
  __ CallRuntime(Runtime::kStringCharFromCode);
  if (!result_.is(rax)) {
    __ movp(result_, rax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
}


void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
                                          Register dest,
                                          Register src,
                                          Register count,
                                          String::Encoding encoding) {
  // Nothing to do for zero characters.
  Label done;
  __ testl(count, count);
  __ j(zero, &done, Label::kNear);

  // Make count the number of bytes to copy.
  if (encoding == String::TWO_BYTE_ENCODING) {
    STATIC_ASSERT(2 == sizeof(uc16));
    __ addl(count, count);
  }

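  // A single byte-wise loop suffices for both encodings: the count was
  // doubled above for two-byte strings, and source and destination always
  // use the same encoding here, so copying bytes preserves the characters.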
  // Copy remaining characters.
  Label loop;
  __ bind(&loop);
  __ movb(kScratchRegister, Operand(src, 0));
  __ movb(Operand(dest, 0), kScratchRegister);
  __ incp(src);
  __ incp(dest);
  __ decl(count);
  __ j(not_zero, &loop);

  __ bind(&done);
}


void SubStringStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  //  rsp[0]  : return address
  //  rsp[8]  : to
  //  rsp[16] : from
  //  rsp[24] : string

  enum SubStringStubArgumentIndices {
    STRING_ARGUMENT_INDEX,
    FROM_ARGUMENT_INDEX,
    TO_ARGUMENT_INDEX,
    SUB_STRING_ARGUMENT_COUNT
  };

  StackArgumentsAccessor args(rsp, SUB_STRING_ARGUMENT_COUNT,
                              ARGUMENTS_DONT_CONTAIN_RECEIVER);

  // Make sure first argument is a string.
  __ movp(rax, args.GetArgumentOperand(STRING_ARGUMENT_INDEX));
  STATIC_ASSERT(kSmiTag == 0);
  __ testl(rax, Immediate(kSmiTagMask));
  __ j(zero, &runtime);
  Condition is_string = masm->IsObjectStringType(rax, rbx, rbx);
  __ j(NegateCondition(is_string), &runtime);

  // rax: string
  // rbx: instance type
  // Calculate length of sub string using the smi values.
  __ movp(rcx, args.GetArgumentOperand(TO_ARGUMENT_INDEX));
  __ movp(rdx, args.GetArgumentOperand(FROM_ARGUMENT_INDEX));
  __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime);

  __ SmiSub(rcx, rcx, rdx);  // Overflow doesn't happen.
  __ cmpp(rcx, FieldOperand(rax, String::kLengthOffset));
  Label not_original_string;
  // Shorter than original string's length: an actual substring.
  __ j(below, &not_original_string, Label::kNear);
  // Longer than original string's length or negative: unsafe arguments.
  __ j(above, &runtime);
  // Return original string.
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
  __ bind(&not_original_string);

  Label single_char;
  __ SmiCompare(rcx, Smi::FromInt(1));
  __ j(equal, &single_char);

  __ SmiToInteger32(rcx, rcx);

  // rax: string
  // rbx: instance type
  // rcx: sub string length
  // rdx: from index (smi)
  // Deal with different string types: update the index if necessary
  // and put the underlying string into rdi.
  Label underlying_unpacked, sliced_string, seq_or_external_string;
  // If the string is not indirect, it can only be sequential or external.
  STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
  STATIC_ASSERT(kIsIndirectStringMask != 0);
  __ testb(rbx, Immediate(kIsIndirectStringMask));
  __ j(zero, &seq_or_external_string, Label::kNear);

  __ testb(rbx, Immediate(kSlicedNotConsMask));
  __ j(not_zero, &sliced_string, Label::kNear);
  // Cons string. Check whether it is flat, then fetch first part.
  // Flat cons strings have an empty second part.
  __ CompareRoot(FieldOperand(rax, ConsString::kSecondOffset),
                 Heap::kempty_stringRootIndex);
  __ j(not_equal, &runtime);
  __ movp(rdi, FieldOperand(rax, ConsString::kFirstOffset));
  // Update instance type.
  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
  __ jmp(&underlying_unpacked, Label::kNear);

  __ bind(&sliced_string);
  // Sliced string. Fetch parent and correct start index by offset.
  __ addp(rdx, FieldOperand(rax, SlicedString::kOffsetOffset));
  __ movp(rdi, FieldOperand(rax, SlicedString::kParentOffset));
  // Update instance type.
  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
  __ jmp(&underlying_unpacked, Label::kNear);

  __ bind(&seq_or_external_string);
  // Sequential or external string. Just move string to the correct register.
  __ movp(rdi, rax);

  __ bind(&underlying_unpacked);

  if (FLAG_string_slices) {
    Label copy_routine;
    // rdi: underlying subject string
    // rbx: instance type of underlying subject string
    // rdx: adjusted start index (smi)
    // rcx: length
    // If coming from the make_two_character_string path, the string
    // is too short to be sliced anyway.
    __ cmpp(rcx, Immediate(SlicedString::kMinLength));
    // Short slice. Copy instead of slicing.
    __ j(less, &copy_routine);
    // Allocate new sliced string. At this point we do not reload the instance
    // type including the string encoding because we simply rely on the info
    // provided by the original string. It does not matter if the original
    // string's encoding is wrong because we always have to recheck encoding of
    // the newly created string's parent anyway due to externalized strings.
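    // (A sliced string is only a (parent, offset, length) descriptor that
    // shares the parent's character storage, so no character data is copied
    // on this path.)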
    Label two_byte_slice, set_slice_header;
    STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
    STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
    __ testb(rbx, Immediate(kStringEncodingMask));
    __ j(zero, &two_byte_slice, Label::kNear);
    __ AllocateOneByteSlicedString(rax, rbx, r14, &runtime);
    __ jmp(&set_slice_header, Label::kNear);
    __ bind(&two_byte_slice);
    __ AllocateTwoByteSlicedString(rax, rbx, r14, &runtime);
    __ bind(&set_slice_header);
    __ Integer32ToSmi(rcx, rcx);
    __ movp(FieldOperand(rax, SlicedString::kLengthOffset), rcx);
    __ movp(FieldOperand(rax, SlicedString::kHashFieldOffset),
            Immediate(String::kEmptyHashField));
    __ movp(FieldOperand(rax, SlicedString::kParentOffset), rdi);
    __ movp(FieldOperand(rax, SlicedString::kOffsetOffset), rdx);
    __ IncrementCounter(counters->sub_string_native(), 1);
    __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);

    __ bind(&copy_routine);
  }

  // rdi: underlying subject string
  // rbx: instance type of underlying subject string
  // rdx: adjusted start index (smi)
  // rcx: length
  // The subject string can only be an external or a sequential string of
  // either encoding at this point.
  Label two_byte_sequential, sequential_string;
  STATIC_ASSERT(kExternalStringTag != 0);
  STATIC_ASSERT(kSeqStringTag == 0);
  __ testb(rbx, Immediate(kExternalStringTag));
  __ j(zero, &sequential_string);

  // Handle external string.
  // Rule out short external strings.
  STATIC_ASSERT(kShortExternalStringTag != 0);
  __ testb(rbx, Immediate(kShortExternalStringMask));
  __ j(not_zero, &runtime);
  __ movp(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
  // Move the pointer so that offset-wise, it looks like a sequential string.
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ subp(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
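  // From here on rdi can be treated exactly like a sequential string: adding
  // the sequential header size back to the adjusted pointer yields the
  // external resource data, so the copy code below works unchanged for both
  // kinds of strings.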

  __ bind(&sequential_string);
  STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
  __ testb(rbx, Immediate(kStringEncodingMask));
  __ j(zero, &two_byte_sequential);

  // Allocate the result.
  __ AllocateOneByteString(rax, rcx, r11, r14, r15, &runtime);

  // rax: result string
  // rcx: result string length
  {  // Locate character of sub string start.
    SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_1);
    __ leap(r14, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
                         SeqOneByteString::kHeaderSize - kHeapObjectTag));
  }
  // Locate first character of result.
  __ leap(rdi, FieldOperand(rax, SeqOneByteString::kHeaderSize));

  // rax: result string
  // rcx: result length
  // rdi: first character of result
  // r14: character of sub string start
  StringHelper::GenerateCopyCharacters(
      masm, rdi, r14, rcx, String::ONE_BYTE_ENCODING);
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);

  __ bind(&two_byte_sequential);
  // Allocate the result.
  __ AllocateTwoByteString(rax, rcx, r11, r14, r15, &runtime);

  // rax: result string
  // rcx: result string length
  {  // Locate character of sub string start.
    SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_2);
    __ leap(r14, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
                         SeqOneByteString::kHeaderSize - kHeapObjectTag));
  }
  // Locate first character of result.
  __ leap(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize));

  // rax: result string
  // rcx: result length
  // rdi: first character of result
  // r14: character of sub string start
  StringHelper::GenerateCopyCharacters(
      masm, rdi, r14, rcx, String::TWO_BYTE_ENCODING);
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);

  // Just jump to runtime to create the sub string.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kSubString);

  __ bind(&single_char);
  // rax: string
  // rbx: instance type
  // rcx: sub string length (smi)
  // rdx: from index (smi)
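  // A one-character substring is produced like String.prototype.charAt:
  // the generator below extracts the character code and materializes the
  // result through the single character string cache rather than allocating
  // a fresh string.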
  StringCharAtGenerator generator(rax, rdx, rcx, rax, &runtime, &runtime,
                                  &runtime, STRING_INDEX_IS_NUMBER,
                                  RECEIVER_IS_STRING);
  generator.GenerateFast(masm);
  __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
  generator.SkipSlow(masm, &runtime);
}


void ToNumberStub::Generate(MacroAssembler* masm) {
  // The ToNumber stub takes one argument in rax.
  Label not_smi;
  __ JumpIfNotSmi(rax, &not_smi, Label::kNear);
  __ Ret();
  __ bind(&not_smi);

  Label not_heap_number;
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &not_heap_number, Label::kNear);
  __ Ret();
  __ bind(&not_heap_number);

  Label not_string, slow_string;
  __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdi);
  // rax: object
  // rdi: object map
  __ j(above_equal, &not_string, Label::kNear);
  // Check if string has a cached array index.
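  // (Hash fields double as a cache: when a string is a valid array index,
  // the parsed index is stored in the hash field and can be read back with
  // IndexFromHash instead of reparsing the string.)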
  __ testl(FieldOperand(rax, String::kHashFieldOffset),
           Immediate(String::kContainsCachedArrayIndexMask));
  __ j(not_zero, &slow_string, Label::kNear);
  __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
  __ IndexFromHash(rax, rax);
  __ Ret();
  __ bind(&slow_string);
  __ PopReturnAddressTo(rcx);     // Pop return address.
  __ Push(rax);                   // Push argument.
  __ PushReturnAddressFrom(rcx);  // Push return address.
  __ TailCallRuntime(Runtime::kStringToNumber);
  __ bind(&not_string);

  Label not_oddball;
  __ CmpInstanceType(rdi, ODDBALL_TYPE);
  __ j(not_equal, &not_oddball, Label::kNear);
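  // Oddballs (undefined, null, true, false) carry their ToNumber value in a
  // field, so the conversion is a single load.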
  __ movp(rax, FieldOperand(rax, Oddball::kToNumberOffset));
  __ Ret();
  __ bind(&not_oddball);

  __ PopReturnAddressTo(rcx);     // Pop return address.
  __ Push(rax);                   // Push argument.
  __ PushReturnAddressFrom(rcx);  // Push return address.
  __ TailCallRuntime(Runtime::kToNumber);
}


void ToLengthStub::Generate(MacroAssembler* masm) {
  // The ToLength stub takes one argument in rax.
  Label not_smi, positive_smi;
  __ JumpIfNotSmi(rax, &not_smi, Label::kNear);
  STATIC_ASSERT(kSmiTag == 0);
  __ testp(rax, rax);
  __ j(greater_equal, &positive_smi, Label::kNear);
  __ xorl(rax, rax);
  __ bind(&positive_smi);
  __ Ret();
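  // Non-smi inputs fall through to the full runtime implementation, which
  // performs the spec ToLength clamping to the range [0, 2^53 - 1].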
  __ bind(&not_smi);

  __ PopReturnAddressTo(rcx);     // Pop return address.
  __ Push(rax);                   // Push argument.
  __ PushReturnAddressFrom(rcx);  // Push return address.
  __ TailCallRuntime(Runtime::kToLength);
}


void ToStringStub::Generate(MacroAssembler* masm) {
  // The ToString stub takes one argument in rax.
  Label is_number;
  __ JumpIfSmi(rax, &is_number, Label::kNear);

  Label not_string;
  __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdi);
  // rax: receiver
  // rdi: receiver map
  __ j(above_equal, &not_string, Label::kNear);
  __ Ret();
  __ bind(&not_string);

  Label not_heap_number;
  __ CompareRoot(rdi, Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &not_heap_number, Label::kNear);
  __ bind(&is_number);
  NumberToStringStub stub(isolate());
  __ TailCallStub(&stub);
  __ bind(&not_heap_number);

  Label not_oddball;
  __ CmpInstanceType(rdi, ODDBALL_TYPE);
  __ j(not_equal, &not_oddball, Label::kNear);
  __ movp(rax, FieldOperand(rax, Oddball::kToStringOffset));
  __ Ret();
  __ bind(&not_oddball);

  __ PopReturnAddressTo(rcx);     // Pop return address.
  __ Push(rax);                   // Push argument.
  __ PushReturnAddressFrom(rcx);  // Push return address.
  __ TailCallRuntime(Runtime::kToString);
}


void ToNameStub::Generate(MacroAssembler* masm) {
  // The ToName stub takes one argument in rax.
  Label is_number;
  __ JumpIfSmi(rax, &is_number, Label::kNear);

  Label not_name;
  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
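  // Names (strings and symbols) occupy the lowest instance-type range, so a
  // single unsigned comparison against LAST_NAME_TYPE accepts both.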
  __ CmpObjectType(rax, LAST_NAME_TYPE, rdi);
  // rax: receiver
  // rdi: receiver map
  __ j(above, &not_name, Label::kNear);
  __ Ret();
  __ bind(&not_name);

  Label not_heap_number;
  __ CompareRoot(rdi, Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &not_heap_number, Label::kNear);
  __ bind(&is_number);
  NumberToStringStub stub(isolate());
  __ TailCallStub(&stub);
  __ bind(&not_heap_number);

  Label not_oddball;
  __ CmpInstanceType(rdi, ODDBALL_TYPE);
  __ j(not_equal, &not_oddball, Label::kNear);
  __ movp(rax, FieldOperand(rax, Oddball::kToStringOffset));
  __ Ret();
  __ bind(&not_oddball);

  __ PopReturnAddressTo(rcx);     // Pop return address.
  __ Push(rax);                   // Push argument.
  __ PushReturnAddressFrom(rcx);  // Push return address.
  __ TailCallRuntime(Runtime::kToName);
}


void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
                                                   Register left,
                                                   Register right,
                                                   Register scratch1,
                                                   Register scratch2) {
  Register length = scratch1;

  // Compare lengths.
  Label check_zero_length;
  __ movp(length, FieldOperand(left, String::kLengthOffset));
  __ SmiCompare(length, FieldOperand(right, String::kLengthOffset));
  __ j(equal, &check_zero_length, Label::kNear);
  __ Move(rax, Smi::FromInt(NOT_EQUAL));
  __ ret(0);

  // Check if the length is zero.
  Label compare_chars;
  __ bind(&check_zero_length);
  STATIC_ASSERT(kSmiTag == 0);
  __ SmiTest(length);
  __ j(not_zero, &compare_chars, Label::kNear);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  // Compare characters.
  __ bind(&compare_chars);
  Label strings_not_equal;
  GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2,
                                  &strings_not_equal, Label::kNear);

  // Characters are equal.
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  // Characters are not equal.
  __ bind(&strings_not_equal);
  __ Move(rax, Smi::FromInt(NOT_EQUAL));
  __ ret(0);
}


void StringHelper::GenerateCompareFlatOneByteStrings(
    MacroAssembler* masm, Register left, Register right, Register scratch1,
    Register scratch2, Register scratch3, Register scratch4) {
  // Ensure that you can always subtract a string length from a non-negative
  // number (e.g. another length).
  STATIC_ASSERT(String::kMaxLength < 0x7fffffff);

  // Find minimum length and length difference.
  __ movp(scratch1, FieldOperand(left, String::kLengthOffset));
  __ movp(scratch4, scratch1);
  __ SmiSub(scratch4,
            scratch4,
            FieldOperand(right, String::kLengthOffset));
  // Register scratch4 now holds left.length - right.length.
  const Register length_difference = scratch4;
  Label left_shorter;
  __ j(less, &left_shorter, Label::kNear);
  // The right string isn't longer than the left one.
  // Get the right string's length by subtracting the (non-negative) difference
  // from the left string's length.
  __ SmiSub(scratch1, scratch1, length_difference);
  __ bind(&left_shorter);
  // Register scratch1 now holds Min(left.length, right.length).
  const Register min_length = scratch1;

  Label compare_lengths;
  // If min-length is zero, go directly to comparing lengths.
  __ SmiTest(min_length);
  __ j(zero, &compare_lengths, Label::kNear);

  // Compare loop.
  Label result_not_equal;
  GenerateOneByteCharsCompareLoop(
      masm, left, right, min_length, scratch2, &result_not_equal,
      // In debug-code mode, SmiTest below might push
      // the target label outside the near range.
      Label::kFar);

  // Completed loop without finding different characters.
  // Compare lengths (precomputed).
  __ bind(&compare_lengths);
  __ SmiTest(length_difference);
  Label length_not_equal;
  __ j(not_zero, &length_not_equal, Label::kNear);

  // Result is EQUAL.
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  Label result_greater;
  Label result_less;
  __ bind(&length_not_equal);
  __ j(greater, &result_greater, Label::kNear);
  __ jmp(&result_less, Label::kNear);
  __ bind(&result_not_equal);
  // Unequal comparison of left to right, either character or length.
  __ j(above, &result_greater, Label::kNear);
  __ bind(&result_less);

  // Result is LESS.
  __ Move(rax, Smi::FromInt(LESS));
  __ ret(0);

  // Result is GREATER.
  __ bind(&result_greater);
  __ Move(rax, Smi::FromInt(GREATER));
  __ ret(0);
}


void StringHelper::GenerateOneByteCharsCompareLoop(
    MacroAssembler* masm, Register left, Register right, Register length,
    Register scratch, Label* chars_not_equal, Label::Distance near_jump) {
  // Change index to run from -length to -1 by adding length to string
  // start. This means that loop ends when index reaches zero, which
  // doesn't need an additional compare.
  __ SmiToInteger32(length, length);
  __ leap(left,
          FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
  __ leap(right,
          FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
  __ negq(length);
  Register index = length;  // index = -length;

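  // In C terms, the loop below is roughly:
  //   for (intptr_t i = -length; i != 0; i++) {
  //     if (left_end[i] != right_end[i]) goto chars_not_equal;
  //   }
  // where left_end/right_end point one past the end of the character data.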
  // Compare loop.
  Label loop;
  __ bind(&loop);
  __ movb(scratch, Operand(left, index, times_1, 0));
  __ cmpb(scratch, Operand(right, index, times_1, 0));
  __ j(not_equal, chars_not_equal, near_jump);
  __ incq(index);
  __ j(not_zero, &loop);
}


void StringCompareStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdx    : left string
  //  -- rax    : right string
  //  -- rsp[0] : return address
  // -----------------------------------
  __ AssertString(rdx);
  __ AssertString(rax);

  // Check for identity.
  Label not_same;
  __ cmpp(rdx, rax);
  __ j(not_equal, &not_same, Label::kNear);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ IncrementCounter(isolate()->counters()->string_compare_native(), 1);
  __ Ret();

  __ bind(&not_same);

  // Check that both are sequential one-byte strings.
  Label runtime;
  __ JumpIfNotBothSequentialOneByteStrings(rdx, rax, rcx, rbx, &runtime);

  // Inline comparison of one-byte strings.
  __ IncrementCounter(isolate()->counters()->string_compare_native(), 1);
  StringHelper::GenerateCompareFlatOneByteStrings(masm, rdx, rax, rcx, rbx, rdi,
                                                  r8);

  // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ bind(&runtime);
  __ PopReturnAddressTo(rcx);
  __ Push(rdx);
  __ Push(rax);
  __ PushReturnAddressFrom(rcx);
  __ TailCallRuntime(Runtime::kStringCompare);
}


void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdx    : left
  //  -- rax    : right
  //  -- rsp[0] : return address
  // -----------------------------------

  // Load rcx with the allocation site. We stick an undefined dummy value here
  // and replace it with the real allocation site later when we instantiate
  // this stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
  __ Move(rcx, handle(isolate()->heap()->undefined_value()));

  // Make sure that we actually patched the allocation site.
  if (FLAG_debug_code) {
    __ testb(rcx, Immediate(kSmiTagMask));
    __ Assert(not_equal, kExpectedAllocationSite);
    __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset),
           isolate()->factory()->allocation_site_map());
    __ Assert(equal, kExpectedAllocationSite);
  }

  // Tail call into the stub that handles binary operations with allocation
  // sites.
  BinaryOpWithAllocationSiteStub stub(isolate(), state());
  __ TailCallStub(&stub);
}


void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
  DCHECK_EQ(CompareICState::BOOLEAN, state());
  Label miss;
  Label::Distance const miss_distance =
      masm->emit_debug_code() ? Label::kFar : Label::kNear;

  __ JumpIfSmi(rdx, &miss, miss_distance);
  __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
  __ JumpIfSmi(rax, &miss, miss_distance);
  __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
  __ JumpIfNotRoot(rcx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
  __ JumpIfNotRoot(rbx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
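  // Both operands are now known to be boolean oddballs. For equality a
  // pointer subtraction suffices because true and false are singletons; for
  // ordered comparisons both sides are first replaced by their cached smi
  // ToNumber values (0 or 1) so the subtraction yields the right sign.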
  if (!Token::IsEqualityOp(op())) {
    __ movp(rax, FieldOperand(rax, Oddball::kToNumberOffset));
    __ AssertSmi(rax);
    __ movp(rdx, FieldOperand(rdx, Oddball::kToNumberOffset));
    __ AssertSmi(rdx);
    __ pushq(rax);
    __ movq(rax, rdx);
    __ popq(rdx);
  }
  __ subp(rax, rdx);
  __ Ret();

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateSmis(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::SMI);
  Label miss;
  __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear);

  if (GetCondition() == equal) {
    // For equality we do not care about the sign of the result.
    __ subp(rax, rdx);
  } else {
    Label done;
    __ subp(rdx, rax);
    __ j(no_overflow, &done, Label::kNear);
    // Correct sign of result in case of overflow.
    __ notp(rdx);
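    // (notp flips the sign bit, which is all that is needed here: callers
    // only inspect the sign of the result, not its magnitude.)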
    __ bind(&done);
    __ movp(rax, rdx);
  }
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::NUMBER);

  Label generic_stub;
  Label unordered, maybe_undefined1, maybe_undefined2;
  Label miss;

  if (left() == CompareICState::SMI) {
    __ JumpIfNotSmi(rdx, &miss);
  }
  if (right() == CompareICState::SMI) {
    __ JumpIfNotSmi(rax, &miss);
  }

  // Load the left and right operands.
  Label done, left, left_smi, right_smi;
  __ JumpIfSmi(rax, &right_smi, Label::kNear);
  __ CompareMap(rax, isolate()->factory()->heap_number_map());
  __ j(not_equal, &maybe_undefined1, Label::kNear);
  __ Movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&left, Label::kNear);
  __ bind(&right_smi);
  __ SmiToInteger32(rcx, rax);  // Can't clobber rax yet.
  __ Cvtlsi2sd(xmm1, rcx);

  __ bind(&left);
  __ JumpIfSmi(rdx, &left_smi, Label::kNear);
  __ CompareMap(rdx, isolate()->factory()->heap_number_map());
  __ j(not_equal, &maybe_undefined2, Label::kNear);
  __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  __ jmp(&done);
  __ bind(&left_smi);
  __ SmiToInteger32(rcx, rdx);  // Can't clobber rdx yet.
  __ Cvtlsi2sd(xmm0, rcx);

  __ bind(&done);
  // Compare the operands.
  __ Ucomisd(xmm0, xmm1);

  // Don't base result on EFLAGS when a NaN is involved.
  __ j(parity_even, &unordered, Label::kNear);

  // Return a result of -1, 0, or 1, based on EFLAGS.
  // Performing mov, because xor would destroy the flag register.
  __ movl(rax, Immediate(0));
  __ movl(rcx, Immediate(0));
  __ setcc(above, rax);  // Add one to zero if carry clear and not equal.
  __ sbbp(rax, rcx);  // Subtract one if below (aka. carry set).
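  // The setcc/sbbp pair materializes a three-way result from EFLAGS:
  //   above: rax = 1, carry clear -> 1 - 0 =  1  (GREATER)
  //   equal: rax = 0, carry clear -> 0 - 0 =  0  (EQUAL)
  //   below: rax = 0, carry set   -> 0 - 1 = -1  (LESS)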
  __ ret(0);

  __ bind(&unordered);
  __ bind(&generic_stub);
  CompareICStub stub(isolate(), op(), CompareICState::GENERIC,
                     CompareICState::GENERIC, CompareICState::GENERIC);
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);

  __ bind(&maybe_undefined1);
  if (Token::IsOrderedRelationalCompareOp(op())) {
    __ Cmp(rax, isolate()->factory()->undefined_value());
    __ j(not_equal, &miss);
    __ JumpIfSmi(rdx, &unordered);
    __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
    __ j(not_equal, &maybe_undefined2, Label::kNear);
    __ jmp(&unordered);
  }

  __ bind(&maybe_undefined2);
  if (Token::IsOrderedRelationalCompareOp(op())) {
    __ Cmp(rdx, isolate()->factory()->undefined_value());
    __ j(equal, &unordered);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::INTERNALIZED_STRING);
  DCHECK(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;

  // Check that both operands are heap objects.
  Label miss;
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss, Label::kNear);

  // Check that both operands are internalized strings.
  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
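  // Since both tags are zero, OR-ing the two instance types and testing the
  // combined mask below checks both operands with a single branch.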
  __ orp(tmp1, tmp2);
  __ testb(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, &miss, Label::kNear);

  // Internalized strings are compared by identity.
  Label done;
  __ cmpp(left, right);
  // Make sure rax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  DCHECK(right.is(rax));
  __ j(not_equal, &done, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::UNIQUE_NAME);
  DCHECK(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;

  // Check that both operands are heap objects.
  Label miss;
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss, Label::kNear);

  // Check that both operands are unique names. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));

  __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear);
  __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear);

  // Unique names are compared by identity.
  Label done;
  __ cmpp(left, right);
  // Make sure rax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  DCHECK(right.is(rax));
  __ j(not_equal, &done, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateStrings(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::STRING);
  Label miss;

  bool equality = Token::IsEqualityOp(op());

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;
  Register tmp3 = rdi;

  // Check that both operands are heap objects.
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss);

  // Check that both operands are strings. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  __ movp(tmp3, tmp1);
  STATIC_ASSERT(kNotStringTag != 0);
  __ orp(tmp3, tmp2);
  __ testb(tmp3, Immediate(kIsNotStringMask));
  __ j(not_zero, &miss);

  // Fast check for identical strings.
  Label not_same;
  __ cmpp(left, right);
  __ j(not_equal, &not_same, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  // Handle not identical strings.
  __ bind(&not_same);

  // Check that both strings are internalized strings. If they are, we're done
  // because we already know they are not identical. We also know they are both
  // strings.
  if (equality) {
    Label do_compare;
    STATIC_ASSERT(kInternalizedTag == 0);
    __ orp(tmp1, tmp2);
    __ testb(tmp1, Immediate(kIsNotInternalizedMask));
    __ j(not_zero, &do_compare, Label::kNear);
    // Make sure rax is non-zero. At this point input operands are
    // guaranteed to be non-zero.
    DCHECK(right.is(rax));
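    // Returning with the right string still in rax signals NOT_EQUAL: the
    // result only needs to be distinguishable from Smi::FromInt(EQUAL) == 0,
    // and a heap pointer is never zero.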
    __ ret(0);
    __ bind(&do_compare);
  }

  // Check that both strings are sequential one-byte.
  Label runtime;
  __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime);

  // Compare flat one-byte strings. Returns when done.
  if (equality) {
    StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1,
                                                  tmp2);
  } else {
    StringHelper::GenerateCompareFlatOneByteStrings(
        masm, left, right, tmp1, tmp2, tmp3, kScratchRegister);
  }

  // Handle more complex cases in runtime.
  __ bind(&runtime);
  __ PopReturnAddressTo(tmp1);
  __ Push(left);
  __ Push(right);
  __ PushReturnAddressFrom(tmp1);
  if (equality) {
    __ TailCallRuntime(Runtime::kStringEquals);
  } else {
    __ TailCallRuntime(Runtime::kStringCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
  DCHECK_EQ(CompareICState::RECEIVER, state());
  Label miss;
  Condition either_smi = masm->CheckEitherSmi(rdx, rax);
  __ j(either_smi, &miss, Label::kNear);

  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
  __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
  __ j(below, &miss, Label::kNear);
  __ CmpObjectType(rdx, FIRST_JS_RECEIVER_TYPE, rcx);
  __ j(below, &miss, Label::kNear);

  DCHECK_EQ(equal, GetCondition());
  __ subp(rax, rdx);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) {
  Label miss;
  Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
  Condition either_smi = masm->CheckEitherSmi(rdx, rax);
  __ j(either_smi, &miss, Label::kNear);

  __ GetWeakValue(rdi, cell);
  __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rdi);
  __ j(not_equal, &miss, Label::kNear);
  __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rdi);
  __ j(not_equal, &miss, Label::kNear);

  if (Token::IsEqualityOp(op())) {
    __ subp(rax, rdx);
    __ ret(0);
  } else {
    __ PopReturnAddressTo(rcx);
    __ Push(rdx);
    __ Push(rax);
    __ Push(Smi::FromInt(NegativeComparisonResult(GetCondition())));
    __ PushReturnAddressFrom(rcx);
    __ TailCallRuntime(Runtime::kCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}

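// A note on the miss protocol below: both operands are pushed twice because
// the first pair is consumed as arguments by the miss handler while the
// second pair survives the call and is popped back into rdx/rax. The runtime
// call returns the rewritten stub's code object in rax, which is then
// tail-called.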
void CompareICStub::GenerateMiss(MacroAssembler* masm) {
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rdx);
    __ Push(rax);
    __ Push(rdx);
    __ Push(rax);
    __ Push(Smi::FromInt(op()));
    __ CallRuntime(Runtime::kCompareIC_Miss);

    // Compute the entry point of the rewritten stub.
    __ leap(rdi, FieldOperand(rax, Code::kHeaderSize));
    __ Pop(rax);
    __ Pop(rdx);
  }

  // Do a tail call to the rewritten stub.
  __ jmp(rdi);
}

void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register properties,
                                                      Handle<Name> name,
                                                      Register r0) {
  DCHECK(name->IsUniqueName());
  // If the names of the slots in the range from 1 to kProbes - 1 for the hash
  // value are not equal to the name, and the kProbes-th slot is not used (its
  // name is the undefined value), then the hash table is guaranteed not to
  // contain the property. This holds even if some slots represent deleted
  // properties (their names are the hole value).
  for (int i = 0; i < kInlinedProbes; i++) {
    // r0 points to properties hash.
    // Compute the masked index: (hash + i + i * i) & mask.
    Register index = r0;
    // Capacity is smi 2^n.
    __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset));
    __ decl(index);
    __ andp(index,
            Immediate(name->Hash() + NameDictionary::GetProbeOffset(i)));

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ leap(index, Operand(index, index, times_2, 0));  // index *= 3.

    Register entity_name = r0;
    // Having undefined at this place means the name is not contained.
    STATIC_ASSERT(kSmiTagSize == 1);
    __ movp(entity_name, Operand(properties,
                                 index,
                                 times_pointer_size,
                                 kElementsStartOffset - kHeapObjectTag));
    __ Cmp(entity_name, masm->isolate()->factory()->undefined_value());
    __ j(equal, done);

    // Stop if we found the property.
    __ Cmp(entity_name, Handle<Name>(name));
    __ j(equal, miss);

    Label good;
    // Check for the hole and skip.
    __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex);
    __ j(equal, &good, Label::kNear);

    // Check if the entry name is not a unique name.
    __ movp(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
    __ JumpIfNotUniqueNameInstanceType(
        FieldOperand(entity_name, Map::kInstanceTypeOffset), miss);
    __ bind(&good);
  }

  NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
                                NEGATIVE_LOOKUP);
  __ Push(Handle<Object>(name));
  __ Push(Immediate(name->Hash()));
  __ CallStub(&stub);
  __ testp(r0, r0);
  __ j(not_zero, miss);
  __ jmp(done);
}

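// A note on the probe sequence used above and below: the masked index is
// computed as (hash + GetProbeOffset(i)) & mask, where GetProbeOffset(i) is
// the i-th triangular number (presumably (i + i * i) / 2, per the HashTable
// definition), which yields the quadratic probing scheme the comments
// abbreviate as (hash + i + i * i) & mask.
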
// Probe the name dictionary in the |elements| register. Jump to the
// |done| label if a property with the given name is found, leaving the
// index into the dictionary in |r1|. Jump to the |miss| label
// otherwise.
void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register elements,
                                                      Register name,
                                                      Register r0,
                                                      Register r1) {
  DCHECK(!elements.is(r0));
  DCHECK(!elements.is(r1));
  DCHECK(!name.is(r0));
  DCHECK(!name.is(r1));

  __ AssertName(name);

  __ SmiToInteger32(r0, FieldOperand(elements, kCapacityOffset));
  __ decl(r0);

  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ movl(r1, FieldOperand(name, Name::kHashFieldOffset));
    __ shrl(r1, Immediate(Name::kHashShift));
    if (i > 0) {
      __ addl(r1, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ andp(r1, r0);

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ leap(r1, Operand(r1, r1, times_2, 0));  // r1 = r1 * 3

    // Check if the key is identical to the name.
    __ cmpp(name, Operand(elements, r1, times_pointer_size,
                          kElementsStartOffset - kHeapObjectTag));
    __ j(equal, done);
  }

  NameDictionaryLookupStub stub(masm->isolate(), elements, r0, r1,
                                POSITIVE_LOOKUP);
  __ Push(name);
  __ movl(r0, FieldOperand(name, Name::kHashFieldOffset));
  __ shrl(r0, Immediate(Name::kHashShift));
  __ Push(r0);
  __ CallStub(&stub);

  __ testp(r0, r0);
  __ j(zero, miss);
  __ jmp(done);
}

void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
  // This stub overrides SometimesSetsUpAFrame() to return false. That means
  // we cannot call anything that could cause a GC from this stub.
  // Stack frame on entry:
  //  rsp[0 * kPointerSize] : return address.
  //  rsp[1 * kPointerSize] : key's hash.
  //  rsp[2 * kPointerSize] : key.
  // Registers:
  //  dictionary_: NameDictionary to probe.
  //  result_: used as scratch.
  //  index_: will hold an index of entry if lookup is successful.
  //          might alias with result_.
  // Returns:
  //  result_ is zero if lookup failed, non zero otherwise.

  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;

  Register scratch = result();

  __ SmiToInteger32(scratch, FieldOperand(dictionary(), kCapacityOffset));
  __ decl(scratch);
  __ Push(scratch);

  // If the names of the slots in the range from 1 to kProbes - 1 for the hash
  // value are not equal to the name, and the kProbes-th slot is not used (its
  // name is the undefined value), then the hash table is guaranteed not to
  // contain the property. This holds even if some slots represent deleted
  // properties (their names are the null value).
  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER,
                              kPointerSize);
  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ movp(scratch, args.GetArgumentOperand(1));
    if (i > 0) {
      __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ andp(scratch, Operand(rsp, 0));

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ leap(index(), Operand(scratch, scratch, times_2, 0));  // index *= 3.

    // Having undefined at this place means the name is not contained.
    __ movp(scratch, Operand(dictionary(), index(), times_pointer_size,
                             kElementsStartOffset - kHeapObjectTag));

    __ Cmp(scratch, isolate()->factory()->undefined_value());
    __ j(equal, &not_in_dictionary);

    // Stop if we found the property.
    __ cmpp(scratch, args.GetArgumentOperand(0));
    __ j(equal, &in_dictionary);

    if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
      // If we hit a key that is not a unique name during negative
      // lookup we have to bail out, as this key might be equal to the
      // key we are looking for.

      // Check if the entry name is not a unique name.
      __ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
      __ JumpIfNotUniqueNameInstanceType(
          FieldOperand(scratch, Map::kInstanceTypeOffset),
          &maybe_in_dictionary);
    }
  }

  __ bind(&maybe_in_dictionary);
  // If we are doing a negative lookup then a probing failure should be
  // treated as a lookup success. For a positive lookup a probing failure
  // should be treated as a lookup failure.
  if (mode() == POSITIVE_LOOKUP) {
    __ movp(scratch, Immediate(0));
    __ Drop(1);
    __ ret(2 * kPointerSize);
  }

  __ bind(&in_dictionary);
  __ movp(scratch, Immediate(1));
  __ Drop(1);
  __ ret(2 * kPointerSize);

  __ bind(&not_in_dictionary);
  __ movp(scratch, Immediate(0));
  __ Drop(1);
  __ ret(2 * kPointerSize);
}

void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
    Isolate* isolate) {
  StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
  stub1.GetCode();
  StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
  stub2.GetCode();
}

// Takes the input in 3 registers: address_, value_ and object_. A pointer to
// the value has just been written into the object; now this stub makes sure
// we keep the GC informed. The word in the object where the value has been
// written is in the address register.
void RecordWriteStub::Generate(MacroAssembler* masm) {
  Label skip_to_incremental_noncompacting;
  Label skip_to_incremental_compacting;

  // The first two instructions are generated with labels so as to get the
  // offset fixed up correctly by the bind(Label*) call. We patch it back and
  // forth between a compare instruction (a nop in this position) and the
  // real branch when we start and stop incremental heap marking.
  // See RecordWriteStub::Patch for details.
  __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
  __ jmp(&skip_to_incremental_compacting, Label::kFar);

  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&skip_to_incremental_noncompacting);
  GenerateIncremental(masm, INCREMENTAL);

  __ bind(&skip_to_incremental_compacting);
  GenerateIncremental(masm, INCREMENTAL_COMPACTION);

  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
  // Will be checked in IncrementalMarking::ActivateGeneratedStub.
  masm->set_byte_at(0, kTwoByteNopInstruction);
  masm->set_byte_at(2, kFiveByteNopInstruction);
}

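// A note on the patching above: the stub starts in STORE_BUFFER_ONLY mode, so
// the two jumps are overwritten with a two-byte nop at offset 0 and a
// five-byte nop at offset 2 (matching the encoded sizes of a short and a near
// jmp on x64). RecordWriteStub::Patch presumably swaps the nops back to the
// original jumps when incremental marking is activated.
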
void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
  regs_.Save(masm);

  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    Label dont_need_remembered_set;

    __ movp(regs_.scratch0(), Operand(regs_.address(), 0));
    __ JumpIfNotInNewSpace(regs_.scratch0(),
                           regs_.scratch0(),
                           &dont_need_remembered_set);

    __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(),
                        &dont_need_remembered_set);

    // First notify the incremental marker if necessary, then update the
    // remembered set.
    CheckNeedsToInformIncrementalMarker(
        masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
    InformIncrementalMarker(masm);
    regs_.Restore(masm);
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);

    __ bind(&dont_need_remembered_set);
  }

  CheckNeedsToInformIncrementalMarker(
      masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
  InformIncrementalMarker(masm);
  regs_.Restore(masm);
  __ ret(0);
}

void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
  Register address =
      arg_reg_1.is(regs_.address()) ? kScratchRegister : regs_.address();
  DCHECK(!address.is(regs_.object()));
  DCHECK(!address.is(arg_reg_1));
  __ Move(address, regs_.address());
  __ Move(arg_reg_1, regs_.object());
  // TODO(gc) Can we just set address arg2 in the beginning?
  __ Move(arg_reg_2, address);
  __ LoadAddress(arg_reg_3,
                 ExternalReference::isolate_address(isolate()));
  int argument_count = 3;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count);
  __ CallCFunction(
      ExternalReference::incremental_marking_record_write_function(isolate()),
      argument_count);
  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
}

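// The helper below first consults the page's write barrier counter: the
// counter is decremented on every recorded write, and once it goes negative
// the slow path is taken unconditionally, so it appears to act as a budget
// that forces a periodic trip into the incremental marker. Only after that
// is the object's mark bit inspected.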
void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    MacroAssembler* masm,
    OnNoNeedToInformIncrementalMarker on_no_need,
    Mode mode) {
  Label on_black;
  Label need_incremental;
  Label need_incremental_pop_object;

  __ movp(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
  __ andp(regs_.scratch0(), regs_.object());
  __ movp(regs_.scratch1(),
          Operand(regs_.scratch0(),
                  MemoryChunk::kWriteBarrierCounterOffset));
  __ subp(regs_.scratch1(), Immediate(1));
  __ movp(Operand(regs_.scratch0(),
                  MemoryChunk::kWriteBarrierCounterOffset),
          regs_.scratch1());
  __ j(negative, &need_incremental);

  // Let's look at the color of the object: if it is not black we don't have
  // to inform the incremental marker.
  __ JumpIfBlack(regs_.object(),
                 regs_.scratch0(),
                 regs_.scratch1(),
                 &on_black,
                 Label::kNear);

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&on_black);

  // Get the value from the slot.
  __ movp(regs_.scratch0(), Operand(regs_.address(), 0));

  if (mode == INCREMENTAL_COMPACTION) {
    Label ensure_not_white;

    __ CheckPageFlag(regs_.scratch0(),  // Contains value.
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kEvacuationCandidateMask,
                     zero,
                     &ensure_not_white,
                     Label::kNear);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kSkipEvacuationSlotsRecordingMask,
                     zero,
                     &need_incremental);

    __ bind(&ensure_not_white);
  }

  // We need an extra register for this, so we push the object register
  // temporarily.
  __ Push(regs_.object());
  __ JumpIfWhite(regs_.scratch0(),  // The value.
                 regs_.scratch1(),  // Scratch.
                 regs_.object(),    // Scratch.
                 &need_incremental_pop_object, Label::kNear);
  __ Pop(regs_.object());

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&need_incremental_pop_object);
  __ Pop(regs_.object());

  __ bind(&need_incremental);

  // Fall through when we need to inform the incremental marker.
}

void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(isolate(), 1, kSaveFPRegs);
  __ Call(ces.GetCode(), RelocInfo::CODE_TARGET);
  int parameter_count_offset =
      StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
  __ movp(rbx, MemOperand(rbp, parameter_count_offset));
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  __ PopReturnAddressTo(rcx);
  int additional_offset =
      function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0;
  __ leap(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset));
  __ jmp(rcx);  // Return to IC Miss stub, continuation still on stack.
}

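// The trampoline stubs below load the type feedback vector into the
// descriptor's vector register and then delegate to the corresponding IC
// stub. GenerateForTrampoline simply emits the same body with its in_frame
// flag set (the flag is unused in these x64 implementations).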
void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
  LoadICStub stub(isolate(), state());
  stub.GenerateForTrampoline(masm);
}


void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
  KeyedLoadICStub stub(isolate(), state());
  stub.GenerateForTrampoline(masm);
}

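// Layout note for HandleArrayCases below: the feedback FixedArray stores
// (map, handler) pairs, i.e. [weak cell of map0, handler0, weak cell of
// map1, handler1, ...], which is why the loop starts its counter at 2 (the
// first pair is checked before the loop) and advances it by 2, loading the
// handler from the slot that follows the matching map.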
static void HandleArrayCases(MacroAssembler* masm, Register feedback,
                             Register receiver_map, Register scratch1,
                             Register scratch2, Register scratch3,
                             bool is_polymorphic, Label* miss) {
  // feedback initially contains the feedback array.
  Label next_loop, prepare_next;
  Label start_polymorphic;

  Register counter = scratch1;
  Register length = scratch2;
  Register cached_map = scratch3;

  __ movp(cached_map, FieldOperand(feedback, FixedArray::OffsetOfElementAt(0)));
  __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  __ j(not_equal, &start_polymorphic);

  // Found; now call the handler.
  Register handler = feedback;
  __ movp(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1)));
  __ leap(handler, FieldOperand(handler, Code::kHeaderSize));
  __ jmp(handler);

  // Polymorphic: we have to loop from 2 to N.
  __ bind(&start_polymorphic);
  __ SmiToInteger32(length, FieldOperand(feedback, FixedArray::kLengthOffset));
  if (!is_polymorphic) {
    // If the IC could be monomorphic, we have to make sure we don't go past
    // the end of the feedback array.
    __ cmpl(length, Immediate(2));
    __ j(equal, miss);
  }
  __ movl(counter, Immediate(2));

  __ bind(&next_loop);
  __ movp(cached_map, FieldOperand(feedback, counter, times_pointer_size,
                                   FixedArray::kHeaderSize));
  __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  __ j(not_equal, &prepare_next);
  __ movp(handler, FieldOperand(feedback, counter, times_pointer_size,
                                FixedArray::kHeaderSize + kPointerSize));
  __ leap(handler, FieldOperand(handler, Code::kHeaderSize));
  __ jmp(handler);

  __ bind(&prepare_next);
  __ addl(counter, Immediate(2));
  __ cmpl(counter, length);
  __ j(less, &next_loop);

  // We exhausted our array of map handler pairs.
  __ jmp(miss);
}

static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver,
                                  Register receiver_map, Register feedback,
                                  Register vector, Register integer_slot,
                                  Label* compare_map, Label* load_smi_map,
                                  Label* try_array) {
  __ JumpIfSmi(receiver, load_smi_map);
  __ movp(receiver_map, FieldOperand(receiver, 0));

  __ bind(compare_map);
  __ cmpp(receiver_map, FieldOperand(feedback, WeakCell::kValueOffset));
  __ j(not_equal, try_array);
  Register handler = feedback;
  __ movp(handler, FieldOperand(vector, integer_slot, times_pointer_size,
                                FixedArray::kHeaderSize + kPointerSize));
  __ leap(handler, FieldOperand(handler, Code::kHeaderSize));
  __ jmp(handler);
}

void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }


void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}

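// In the load_smi_map paths of the stubs below, smi receivers are looked up
// under the heap number map: the feedback vector presumably records the heap
// number map for smis so that smis and heap numbers can share handlers, which
// is why the stubs load kHeapNumberMapRootIndex rather than a dedicated smi
// map.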
void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // rdx
  Register name = LoadWithVectorDescriptor::NameRegister();          // rcx
  Register vector = LoadWithVectorDescriptor::VectorRegister();      // rbx
  Register slot = LoadWithVectorDescriptor::SlotRegister();          // rax
  Register feedback = rdi;
  Register integer_slot = r8;
  Register receiver_map = r9;

  __ SmiToInteger32(integer_slot, slot);
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector,
                        integer_slot, &compare_map, &load_smi_map, &try_array);

  // Is it a fixed array?
  __ bind(&try_array);
  __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &not_array);
  HandleArrayCases(masm, feedback, receiver_map, integer_slot, r11, r15, true,
                   &miss);

  __ bind(&not_array);
  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
  __ j(not_equal, &miss);
  Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags(
      Code::ComputeHandlerFlags(Code::LOAD_IC));
  masm->isolate()->stub_cache()->GenerateProbe(
      masm, Code::LOAD_IC, code_flags, receiver, name, feedback, no_reg);

  __ bind(&miss);
  LoadIC::GenerateMiss(masm);

  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}

void KeyedLoadICStub::Generate(MacroAssembler* masm) {
  GenerateImpl(masm, false);
}


void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}

void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // rdx
  Register key = LoadWithVectorDescriptor::NameRegister();           // rcx
  Register vector = LoadWithVectorDescriptor::VectorRegister();      // rbx
  Register slot = LoadWithVectorDescriptor::SlotRegister();          // rax
  Register feedback = rdi;
  Register integer_slot = r8;
  Register receiver_map = r9;

  __ SmiToInteger32(integer_slot, slot);
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector,
                        integer_slot, &compare_map, &load_smi_map, &try_array);

  __ bind(&try_array);
  // Is it a fixed array?
  __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &not_array);

  // We have a polymorphic element handler.
  Label polymorphic, try_poly_name;
  __ bind(&polymorphic);
  HandleArrayCases(masm, feedback, receiver_map, integer_slot, r11, r15, true,
                   &miss);

  __ bind(&not_array);
  // Is it generic?
  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
  __ j(not_equal, &try_poly_name);
  Handle<Code> megamorphic_stub =
      KeyedLoadIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
  __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);

  __ bind(&try_poly_name);
  // We might have a name in feedback, and a fixed array in the next slot.
  __ cmpp(key, feedback);
  __ j(not_equal, &miss);
  // If the name comparison succeeded, we know we have a fixed array with
  // at least one map/handler pair.
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize + kPointerSize));
  HandleArrayCases(masm, feedback, receiver_map, integer_slot, r11, r15, false,
                   &miss);

  __ bind(&miss);
  KeyedLoadIC::GenerateMiss(masm);

  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}

void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
  VectorStoreICStub stub(isolate(), state());
  stub.GenerateForTrampoline(masm);
}


void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
  VectorKeyedStoreICStub stub(isolate(), state());
  stub.GenerateForTrampoline(masm);
}

void VectorStoreICStub::Generate(MacroAssembler* masm) {
  GenerateImpl(masm, false);
}


void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}

void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = VectorStoreICDescriptor::ReceiverRegister();  // rdx
  Register key = VectorStoreICDescriptor::NameRegister();           // rcx
  Register vector = VectorStoreICDescriptor::VectorRegister();      // rbx
  Register slot = VectorStoreICDescriptor::SlotRegister();          // rdi
  DCHECK(VectorStoreICDescriptor::ValueRegister().is(rax));         // rax
  Register feedback = r8;
  Register integer_slot = r9;
  Register receiver_map = r11;
  DCHECK(!AreAliased(feedback, integer_slot, vector, slot, receiver_map));

  __ SmiToInteger32(integer_slot, slot);
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector,
                        integer_slot, &compare_map, &load_smi_map, &try_array);

  // Is it a fixed array?
  __ bind(&try_array);
  __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &not_array);
  HandleArrayCases(masm, feedback, receiver_map, integer_slot, r14, r15, true,
                   &miss);

  __ bind(&not_array);
  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
  __ j(not_equal, &miss);

  Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags(
      Code::ComputeHandlerFlags(Code::STORE_IC));
  masm->isolate()->stub_cache()->GenerateProbe(masm, Code::STORE_IC, code_flags,
                                               receiver, key, feedback, no_reg);

  __ bind(&miss);
  StoreIC::GenerateMiss(masm);

  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}

void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) {
  GenerateImpl(masm, false);
}


void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}

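// Layout note for HandlePolymorphicKeyedStoreCase below: keyed-store feedback
// arrays hold triples, [weak cell of map, transition map weak cell (or
// undefined), handler], hence the loop advancing its counter by 3. When the
// middle slot is not undefined, the entry describes a map transition and the
// new map is loaded before jumping to the handler.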
static void HandlePolymorphicKeyedStoreCase(MacroAssembler* masm,
                                            Register receiver_map,
                                            Register feedback, Register scratch,
                                            Register scratch1,
                                            Register scratch2, Label* miss) {
  // feedback initially contains the feedback array.
  Label next, next_loop, prepare_next;
  Label transition_call;

  Register cached_map = scratch;
  Register counter = scratch1;
  Register length = scratch2;

  // Polymorphic, we have to loop from 0 to N - 1.
  __ movp(counter, Immediate(0));
  __ movp(length, FieldOperand(feedback, FixedArray::kLengthOffset));
  __ SmiToInteger32(length, length);

  __ bind(&next_loop);
  __ movp(cached_map, FieldOperand(feedback, counter, times_pointer_size,
                                   FixedArray::kHeaderSize));
  __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  __ j(not_equal, &prepare_next);
  __ movp(cached_map, FieldOperand(feedback, counter, times_pointer_size,
                                   FixedArray::kHeaderSize + kPointerSize));
  __ CompareRoot(cached_map, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, &transition_call);
  __ movp(feedback, FieldOperand(feedback, counter, times_pointer_size,
                                 FixedArray::kHeaderSize + 2 * kPointerSize));
  __ leap(feedback, FieldOperand(feedback, Code::kHeaderSize));
  __ jmp(feedback);

  __ bind(&transition_call);
  DCHECK(receiver_map.is(VectorStoreTransitionDescriptor::MapRegister()));
  __ movp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  // The weak cell may have been cleared.
  __ JumpIfSmi(receiver_map, miss);
  // Get the handler in value.
  __ movp(feedback, FieldOperand(feedback, counter, times_pointer_size,
                                 FixedArray::kHeaderSize + 2 * kPointerSize));
  __ leap(feedback, FieldOperand(feedback, Code::kHeaderSize));
  __ jmp(feedback);

  __ bind(&prepare_next);
  __ addl(counter, Immediate(3));
  __ cmpl(counter, length);
  __ j(less, &next_loop);

  // We exhausted our array of map handler pairs.
  __ jmp(miss);
}

void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = VectorStoreICDescriptor::ReceiverRegister();  // rdx
  Register key = VectorStoreICDescriptor::NameRegister();           // rcx
  Register vector = VectorStoreICDescriptor::VectorRegister();      // rbx
  Register slot = VectorStoreICDescriptor::SlotRegister();          // rdi
  DCHECK(VectorStoreICDescriptor::ValueRegister().is(rax));         // rax
  Register feedback = r8;
  Register integer_slot = r9;
  Register receiver_map = r11;
  DCHECK(!AreAliased(feedback, integer_slot, vector, slot, receiver_map));

  __ SmiToInteger32(integer_slot, slot);
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector,
                        integer_slot, &compare_map, &load_smi_map, &try_array);

  // Is it a fixed array?
  __ bind(&try_array);
  __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &not_array);
  HandlePolymorphicKeyedStoreCase(masm, receiver_map, feedback, integer_slot,
                                  r15, r14, &miss);

  __ bind(&not_array);
  Label try_poly_name;
  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
  __ j(not_equal, &try_poly_name);

  Handle<Code> megamorphic_stub =
      KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
  __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);

  __ bind(&try_poly_name);
  // We might have a name in feedback, and a fixed array in the next slot.
  __ cmpp(key, feedback);
  __ j(not_equal, &miss);
  // If the name comparison succeeded, we know we have a fixed array with
  // at least one map/handler pair.
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize + kPointerSize));
  HandleArrayCases(masm, feedback, receiver_map, integer_slot, r14, r15, false,
                   &miss);

  __ bind(&miss);
  KeyedStoreIC::GenerateMiss(masm);

  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}

void CallICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(rbx);
  CallICStub stub(isolate(), state());
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
}

void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
  if (masm->isolate()->function_entry_hook() != NULL) {
    ProfileEntryHookStub stub(masm->isolate());
    masm->CallStub(&stub);
  }
}

void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  // This stub can be called from essentially anywhere, so it needs to save
  // all volatile and callee-save registers.
  const size_t kNumSavedRegisters = 2;
  __ pushq(arg_reg_1);
  __ pushq(arg_reg_2);

  // Calculate the original stack pointer and store it in the second arg.
  __ leap(arg_reg_2,
          Operand(rsp, kNumSavedRegisters * kRegisterSize + kPCOnStackSize));

  // Calculate the function address and store it in the first arg.
  __ movp(arg_reg_1, Operand(rsp, kNumSavedRegisters * kRegisterSize));
  __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));

  // Save the remainder of the volatile registers.
  masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);

  // Call the entry hook function.
  __ Move(rax, FUNCTION_ADDR(isolate()->function_entry_hook()),
          Assembler::RelocInfoNone());

  AllowExternalCallThatCantCauseGC scope(masm);

  const int kArgumentCount = 2;
  __ PrepareCallCFunction(kArgumentCount);
  __ CallCFunction(rax, kArgumentCount);

  // Restore volatile regs.
  masm->PopCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);
  __ popq(arg_reg_2);
  __ popq(arg_reg_1);

  __ Ret();
}

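// The dispatch helpers below assume rdx holds the elements kind as a raw
// integer when mode == DONT_OVERRIDE; each candidate kind in the fast
// elements kind sequence is compared in turn and the matching specialized
// constructor stub is tail-called.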
template<class T>
static void CreateArrayDispatch(MacroAssembler* masm,
                                AllocationSiteOverrideMode mode) {
  if (mode == DISABLE_ALLOCATION_SITES) {
    T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmpl(rdx, Immediate(kind));
      __ j(not_equal, &next);
      T stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}

static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
                                           AllocationSiteOverrideMode mode) {
  // rbx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
  // rdx - kind (if mode != DISABLE_ALLOCATION_SITES)
  // rax - number of arguments
  // rdi - constructor?
  // rsp[0] - return address
  // rsp[8] - last argument
  Handle<Object> undefined_sentinel(
      masm->isolate()->heap()->undefined_value(),
      masm->isolate());

  Label normal_sequence;
  if (mode == DONT_OVERRIDE) {
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
    STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4);
    STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);

    // Is the low bit set? If so, we are holey and that is good.
    __ testb(rdx, Immediate(1));
    __ j(not_zero, &normal_sequence);
  }

  // Look at the first argument.
  StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rcx, args.GetArgumentOperand(0));
  __ testp(rcx, rcx);
  __ j(zero, &normal_sequence);

  if (mode == DISABLE_ALLOCATION_SITES) {
    ElementsKind initial = GetInitialFastElementsKind();
    ElementsKind holey_initial = GetHoleyElementsKind(initial);

    ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
                                                  holey_initial,
                                                  DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub_holey);

    __ bind(&normal_sequence);
    ArraySingleArgumentConstructorStub stub(masm->isolate(),
                                            initial,
                                            DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    // We are going to create a holey array, but our kind is non-holey.
    // Fix kind and retry (only if we have an allocation site in the slot).
    __ incl(rdx);

    if (FLAG_debug_code) {
      Handle<Map> allocation_site_map =
          masm->isolate()->factory()->allocation_site_map();
      __ Cmp(FieldOperand(rbx, 0), allocation_site_map);
      __ Assert(equal, kExpectedAllocationSite);
    }

    // Save the resulting elements kind in type info. We can't just store rdx
    // in the AllocationSite::transition_info field because elements kind is
    // restricted to a portion of the field; the upper bits need to be left
    // alone.
    STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
    __ SmiAddConstant(FieldOperand(rbx, AllocationSite::kTransitionInfoOffset),
                      Smi::FromInt(kFastElementsKindPackedToHoley));

    __ bind(&normal_sequence);
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmpl(rdx, Immediate(kind));
      __ j(not_equal, &next);
      ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}

template<class T>
static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
  int to_index = GetSequenceIndexFromFastElementsKind(
      TERMINAL_FAST_ELEMENTS_KIND);
  for (int i = 0; i <= to_index; ++i) {
    ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
    T stub(isolate, kind);
    stub.GetCode();
    if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
      T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
      stub1.GetCode();
    }
  }
}

void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
  ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
      isolate);
}

void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
    Isolate* isolate) {
  ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
  for (int i = 0; i < 2; i++) {
    // For internal arrays we only need a few things.
    InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
    stubh1.GetCode();
    InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
    stubh2.GetCode();
    InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
    stubh3.GetCode();
  }
}

void ArrayConstructorStub::GenerateDispatchToArrayStub(
    MacroAssembler* masm,
    AllocationSiteOverrideMode mode) {
  if (argument_count() == ANY) {
    Label not_zero_case, not_one_case;
    __ testp(rax, rax);
    __ j(not_zero, &not_zero_case);
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);

    __ bind(&not_zero_case);
    __ cmpl(rax, Immediate(1));
    __ j(greater, &not_one_case);
    CreateArrayDispatchOneArgument(masm, mode);

    __ bind(&not_one_case);
    CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
  } else if (argument_count() == NONE) {
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
  } else if (argument_count() == ONE) {
    CreateArrayDispatchOneArgument(masm, mode);
  } else if (argument_count() == MORE_THAN_ONE) {
    CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
  } else {
    UNREACHABLE();
  }
}

void ArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rbx    : AllocationSite or undefined
  //  -- rdi    : constructor
  //  -- rdx    : new target
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions, which always have maps.

    // The initial map for the builtin Array function should be a map.
    __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // The following smi check will catch both a NULL pointer and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rcx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);

    // We should either have undefined in rbx or a valid AllocationSite.
    __ AssertUndefinedOrAllocationSite(rbx);
  }

  // Enter the context of the Array function.
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  Label subclassing;
  __ cmpp(rdi, rdx);
  __ j(not_equal, &subclassing);

  Label no_info;
  // If the feedback vector is the undefined value, call an array constructor
  // that doesn't use AllocationSites.
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(equal, &no_info);

  // Only look at the lower 16 bits of the transition info.
  __ movp(rdx, FieldOperand(rbx, AllocationSite::kTransitionInfoOffset));
  __ SmiToInteger32(rdx, rdx);
  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
  __ andp(rdx, Immediate(AllocationSite::ElementsKindBits::kMask));
  GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);

  __ bind(&no_info);
  GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);

  // Subclassing.
  __ bind(&subclassing);
  switch (argument_count()) {
    case ANY:
    case MORE_THAN_ONE: {
      StackArgumentsAccessor args(rsp, rax);
      __ movp(args.GetReceiverOperand(), rdi);
      __ addp(rax, Immediate(3));
      break;
    }
    case NONE: {
      StackArgumentsAccessor args(rsp, 0);
      __ movp(args.GetReceiverOperand(), rdi);
      __ Set(rax, 3);
      break;
    }
    case ONE: {
      StackArgumentsAccessor args(rsp, 1);
      __ movp(args.GetReceiverOperand(), rdi);
      __ Set(rax, 4);
      break;
    }
  }
  __ PopReturnAddressTo(rcx);
  __ Push(rdx);
  __ Push(rbx);
  __ PushReturnAddressFrom(rcx);
  __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
}

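// GenerateCase below mirrors the public Array constructor dispatch (zero,
// one, or N arguments) for the internal Array functions, but without any
// allocation-site feedback. For packed kinds, a single non-zero length
// argument dispatches to the corresponding holey kind, since a freshly
// allocated array of non-zero length contains only holes.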
void InternalArrayConstructorStub::GenerateCase(
    MacroAssembler* masm, ElementsKind kind) {
  Label not_zero_case, not_one_case;
  Label normal_sequence;

  __ testp(rax, rax);
  __ j(not_zero, &not_zero_case);
  InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
  __ TailCallStub(&stub0);

  __ bind(&not_zero_case);
  __ cmpl(rax, Immediate(1));
  __ j(greater, &not_one_case);

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array;
    // look at the first argument.
    StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movp(rcx, args.GetArgumentOperand(0));
    __ testp(rcx, rcx);
    __ j(zero, &normal_sequence);

    InternalArraySingleArgumentConstructorStub
        stub1_holey(isolate(), GetHoleyElementsKind(kind));
    __ TailCallStub(&stub1_holey);
  }

  __ bind(&normal_sequence);
  InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
  __ TailCallStub(&stub1);

  __ bind(&not_one_case);
  InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
  __ TailCallStub(&stubN);
}

void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rdi    : constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions, which always have maps.

    // The initial map for the builtin Array function should be a map.
    __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // The following smi check will catch both a NULL pointer and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rcx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));

  // Load the map's "bit field 2" into rcx. We only need the first byte,
  // but the following masking takes care of that anyway.
  __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ DecodeField<Map::ElementsKindBits>(rcx);

  if (FLAG_debug_code) {
    Label done;
    __ cmpl(rcx, Immediate(FAST_ELEMENTS));
    __ j(equal, &done);
    __ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS));
    __ Assert(equal,
              kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  Label fast_elements_case;
  __ cmpl(rcx, Immediate(FAST_ELEMENTS));
  __ j(equal, &fast_elements_case);
  GenerateCase(masm, FAST_HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateCase(masm, FAST_ELEMENTS);
}

Ben Murdoch097c5b22016-05-18 11:27:45 +01004460void FastNewObjectStub::Generate(MacroAssembler* masm) {
4461 // ----------- S t a t e -------------
4462 // -- rdi : target
  // -- rdx    : new target
  // -- rsi    : context
  // -- rsp[0] : return address
  // -----------------------------------
  __ AssertFunction(rdi);
  __ AssertReceiver(rdx);

  // Verify that the new target is a JSFunction.
  Label new_object;
  __ CmpObjectType(rdx, JS_FUNCTION_TYPE, rbx);
  __ j(not_equal, &new_object);

  // Load the initial map and verify that it's in fact a map.
  __ movp(rcx, FieldOperand(rdx, JSFunction::kPrototypeOrInitialMapOffset));
  __ JumpIfSmi(rcx, &new_object);
  __ CmpObjectType(rcx, MAP_TYPE, rbx);
  __ j(not_equal, &new_object);

  // Fall back to runtime if the target differs from the new target's
  // initial map constructor.
  __ cmpp(rdi, FieldOperand(rcx, Map::kConstructorOrBackPointerOffset));
  __ j(not_equal, &new_object);

  // Allocate the JSObject on the heap.
  Label allocate, done_allocate;
  __ movzxbl(rbx, FieldOperand(rcx, Map::kInstanceSizeOffset));
  __ leal(rbx, Operand(rbx, times_pointer_size, 0));
  __ Allocate(rbx, rax, rdi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
  __ bind(&done_allocate);

  // Initialize the JSObject fields.
  __ movp(Operand(rax, JSObject::kMapOffset), rcx);
  __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
  __ movp(Operand(rax, JSObject::kPropertiesOffset), rbx);
  __ movp(Operand(rax, JSObject::kElementsOffset), rbx);
  STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
  __ leap(rbx, Operand(rax, JSObject::kHeaderSize));

  // ----------- S t a t e -------------
  // -- rax    : result (untagged)
  // -- rbx    : result fields (untagged)
  // -- rdi    : result end (untagged)
  // -- rcx    : initial map
  // -- rsi    : context
  // -- rsp[0] : return address
  // -----------------------------------

  // Perform in-object slack tracking if requested.
  Label slack_tracking;
  STATIC_ASSERT(Map::kNoSlackTracking == 0);
  __ LoadRoot(r11, Heap::kUndefinedValueRootIndex);
  __ testl(FieldOperand(rcx, Map::kBitField3Offset),
           Immediate(Map::ConstructionCounter::kMask));
  __ j(not_zero, &slack_tracking, Label::kNear);
  {
    // Initialize all in-object fields with undefined.
    __ InitializeFieldsWithFiller(rbx, rdi, r11);

    // Add the object tag to make the JSObject real.
    STATIC_ASSERT(kHeapObjectTag == 1);
    __ incp(rax);
    __ Ret();
  }
  __ bind(&slack_tracking);
  {
    // Decrease generous allocation count.
    STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
    __ subl(FieldOperand(rcx, Map::kBitField3Offset),
            Immediate(1 << Map::ConstructionCounter::kShift));

    // Initialize the in-object fields with undefined.
    __ movzxbl(rdx, FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset));
    __ negp(rdx);
    __ leap(rdx, Operand(rdi, rdx, times_pointer_size, 0));
    __ InitializeFieldsWithFiller(rbx, rdx, r11);

    // Initialize the remaining (reserved) fields with one pointer filler map.
    __ LoadRoot(r11, Heap::kOnePointerFillerMapRootIndex);
    __ InitializeFieldsWithFiller(rdx, rdi, r11);

    // Add the object tag to make the JSObject real.
    STATIC_ASSERT(kHeapObjectTag == 1);
    __ incp(rax);

    // Check if we can finalize the instance size.
    Label finalize;
    STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
    __ testl(FieldOperand(rcx, Map::kBitField3Offset),
             Immediate(Map::ConstructionCounter::kMask));
    __ j(zero, &finalize, Label::kNear);
    __ Ret();

    // Finalize the instance size.
    __ bind(&finalize);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(rax);
      __ Push(rcx);
      __ CallRuntime(Runtime::kFinalizeInstanceSize);
      __ Pop(rax);
    }
    __ Ret();
  }

  // Fall back to %AllocateInNewSpace.
  __ bind(&allocate);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Integer32ToSmi(rbx, rbx);
    __ Push(rcx);
    __ Push(rbx);
    __ CallRuntime(Runtime::kAllocateInNewSpace);
    __ Pop(rcx);
  }
  STATIC_ASSERT(kHeapObjectTag == 1);
  __ decp(rax);
  __ movzxbl(rbx, FieldOperand(rcx, Map::kInstanceSizeOffset));
  __ leap(rdi, Operand(rax, rbx, times_pointer_size, 0));
  __ jmp(&done_allocate);

  // Fall back to %NewObject.
  __ bind(&new_object);
  __ PopReturnAddressTo(rcx);
  __ Push(rdi);
  __ Push(rdx);
  __ PushReturnAddressFrom(rcx);
  __ TailCallRuntime(Runtime::kNewObject);
}
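
// Note on the tagging arithmetic used above: the fast path allocates the
// object untagged and turns it into a real heap object with a single incp
// (and the runtime fallback strips the tag again with decp), relying on
// kHeapObjectTag == 1. An illustrative sketch in plain C++ (not generated
// code; the variable names are hypothetical):
//
//   uintptr_t untagged = raw_allocation_top;         // from Allocate()
//   uintptr_t tagged = untagged + kHeapObjectTag;    // the incp on rax
//   // FieldOperand(obj, off) compensates with (off - kHeapObjectTag), so
//   // tagged pointers and byte offsets mix without explicit masking.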


void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rdi    : function
  // -- rsi    : context
  // -- rbp    : frame pointer
  // -- rsp[0] : return address
  // -----------------------------------
  __ AssertFunction(rdi);

  // For Ignition we need to skip all possible handler/stub frames until
  // we reach the JavaScript frame for the function (similar to what the
  // runtime fallback implementation does). So make rdx point to that
  // JavaScript frame.
  {
    Label loop, loop_entry;
    __ movp(rdx, rbp);
    __ jmp(&loop_entry, Label::kNear);
    __ bind(&loop);
    __ movp(rdx, Operand(rdx, StandardFrameConstants::kCallerFPOffset));
    __ bind(&loop_entry);
    __ cmpp(rdi, Operand(rdx, StandardFrameConstants::kMarkerOffset));
    __ j(not_equal, &loop);
  }
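
  // The walk above in pseudo-C (illustrative only): starting from the
  // current frame pointer, follow caller-fp links until the frame whose
  // marker slot holds this JSFunction:
  //
  //   Frame* f = rbp;
  //   while (f->marker_slot != function) f = f->caller_fp;
  //   rdx = f;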

  // Check if we have rest parameters (only possible if we have an
  // arguments adaptor frame below the function frame).
  Label no_rest_parameters;
  __ movp(rbx, Operand(rdx, StandardFrameConstants::kCallerFPOffset));
  __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(not_equal, &no_rest_parameters, Label::kNear);

  // Check if the arguments adaptor frame contains more arguments than
  // specified by the function's internal formal parameter count.
  Label rest_parameters;
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ LoadSharedFunctionInfoSpecialField(
      rcx, rcx, SharedFunctionInfo::kFormalParameterCountOffset);
  __ SmiToInteger32(
      rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ subl(rax, rcx);
  __ j(greater, &rest_parameters);

  // Return an empty rest parameter array.
  __ bind(&no_rest_parameters);
  {
    // ----------- S t a t e -------------
    // -- rsi    : context
    // -- rsp[0] : return address
    // -----------------------------------

    // Allocate an empty rest parameter array.
    Label allocate, done_allocate;
    __ Allocate(JSArray::kSize, rax, rdx, rcx, &allocate, TAG_OBJECT);
    __ bind(&done_allocate);

    // Setup the rest parameter array in rax.
    __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, rcx);
    __ movp(FieldOperand(rax, JSArray::kMapOffset), rcx);
    __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
    __ movp(FieldOperand(rax, JSArray::kPropertiesOffset), rcx);
    __ movp(FieldOperand(rax, JSArray::kElementsOffset), rcx);
    __ movp(FieldOperand(rax, JSArray::kLengthOffset), Immediate(0));
    STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
    __ Ret();

    // Fall back to %AllocateInNewSpace.
    __ bind(&allocate);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(Smi::FromInt(JSArray::kSize));
      __ CallRuntime(Runtime::kAllocateInNewSpace);
    }
    __ jmp(&done_allocate);
  }

  __ bind(&rest_parameters);
  {
    // Compute the pointer to the first rest parameter (skipping the receiver).
    __ leap(rbx, Operand(rbx, rax, times_pointer_size,
                         StandardFrameConstants::kCallerSPOffset -
                             1 * kPointerSize));

    // ----------- S t a t e -------------
    // -- rsi    : context
    // -- rax    : number of rest parameters
    // -- rbx    : pointer to the first rest parameter
    // -- rsp[0] : return address
    // -----------------------------------

    // Allocate space for the rest parameter array plus the backing store.
    Label allocate, done_allocate;
    __ leal(rcx, Operand(rax, times_pointer_size,
                         JSArray::kSize + FixedArray::kHeaderSize));
    __ Allocate(rcx, rdx, rdi, no_reg, &allocate, TAG_OBJECT);
    __ bind(&done_allocate);

    // Compute the arguments.length in rdi.
    __ Integer32ToSmi(rdi, rax);

    // Setup the elements array in rdx.
    __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
    __ movp(FieldOperand(rdx, FixedArray::kMapOffset), rcx);
    __ movp(FieldOperand(rdx, FixedArray::kLengthOffset), rdi);
    {
      Label loop, done_loop;
      __ Set(rcx, 0);
      __ bind(&loop);
      __ cmpl(rcx, rax);
      __ j(equal, &done_loop, Label::kNear);
      __ movp(kScratchRegister, Operand(rbx, 0 * kPointerSize));
      __ movp(
          FieldOperand(rdx, rcx, times_pointer_size, FixedArray::kHeaderSize),
          kScratchRegister);
      __ subp(rbx, Immediate(1 * kPointerSize));
      __ addl(rcx, Immediate(1));
      __ jmp(&loop);
      __ bind(&done_loop);
    }

    // Setup the rest parameter array in rax.
    __ leap(rax,
            Operand(rdx, rax, times_pointer_size, FixedArray::kHeaderSize));
    __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, rcx);
    __ movp(FieldOperand(rax, JSArray::kMapOffset), rcx);
    __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
    __ movp(FieldOperand(rax, JSArray::kPropertiesOffset), rcx);
    __ movp(FieldOperand(rax, JSArray::kElementsOffset), rdx);
    __ movp(FieldOperand(rax, JSArray::kLengthOffset), rdi);
    STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
    __ Ret();

    // Fall back to %AllocateInNewSpace.
    __ bind(&allocate);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Integer32ToSmi(rax, rax);
      __ Integer32ToSmi(rcx, rcx);
      __ Push(rax);
      __ Push(rbx);
      __ Push(rcx);
      __ CallRuntime(Runtime::kAllocateInNewSpace);
      __ movp(rdx, rax);
      __ Pop(rbx);
      __ Pop(rax);
      __ SmiToInteger32(rax, rax);
    }
    __ jmp(&done_allocate);
  }
}
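
// JS-level behavior implemented by the stub above (illustrative):
//
//   function f(a, ...rest) { return rest; }
//   f(1);        // -> []      (empty-array fast path)
//   f(1, 2, 3);  // -> [2, 3]  (two excess arguments copied from the frame)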


void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rdi    : function
  // -- rsi    : context
  // -- rbp    : frame pointer
  // -- rsp[0] : return address
  // -----------------------------------
  __ AssertFunction(rdi);

  // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ LoadSharedFunctionInfoSpecialField(
      rcx, rcx, SharedFunctionInfo::kFormalParameterCountOffset);
  __ leap(rdx, Operand(rbp, rcx, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));
  __ Integer32ToSmi(rcx, rcx);

  // rcx : number of parameters (tagged)
  // rdx : parameters pointer
  // rdi : function
  // rsp[0] : return address
  // Registers used over the whole function:
  //  rbx: the mapped parameter count (untagged)
  //  rax: the allocated object (tagged).
  Factory* factory = isolate()->factory();

  __ SmiToInteger64(rbx, rcx);
  // rbx = parameter count (untagged)

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ movp(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ movp(r8, Operand(rax, StandardFrameConstants::kContextOffset));
  __ Cmp(r8, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor_frame);

  // No adaptor, parameter count = argument count.
  __ movp(r11, rbx);
  __ jmp(&try_allocate, Label::kNear);

  // We have an adaptor frame. Patch the parameters pointer.
  __ bind(&adaptor_frame);
  __ SmiToInteger64(
      r11, Operand(rax, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ leap(rdx, Operand(rax, r11, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));

  // rbx = parameter count (untagged)
  // r11 = argument count (untagged)
  // Compute the mapped parameter count = min(rbx, r11) in rbx.
  __ cmpp(rbx, r11);
  __ j(less_equal, &try_allocate, Label::kNear);
  __ movp(rbx, r11);

  __ bind(&try_allocate);

  // Compute the sizes of backing store, parameter map, and arguments object.
  // 1. Parameter map, has 2 extra words containing context and backing store.
  const int kParameterMapHeaderSize =
      FixedArray::kHeaderSize + 2 * kPointerSize;
  Label no_parameter_map;
  __ xorp(r8, r8);
  __ testp(rbx, rbx);
  __ j(zero, &no_parameter_map, Label::kNear);
  __ leap(r8, Operand(rbx, times_pointer_size, kParameterMapHeaderSize));
  __ bind(&no_parameter_map);

  // 2. Backing store.
  __ leap(r8, Operand(r8, r11, times_pointer_size, FixedArray::kHeaderSize));

  // 3. Arguments object.
  __ addp(r8, Immediate(JSSloppyArgumentsObject::kSize));

  // Do the allocation of all three objects in one go.
  __ Allocate(r8, rax, r9, no_reg, &runtime, TAG_OBJECT);

  // rax = address of new object(s) (tagged)
  // r11 = argument count (untagged)
  // Get the arguments map from the current native context into r9.
  Label has_mapped_parameters, instantiate;
  __ movp(r9, NativeContextOperand());
  __ testp(rbx, rbx);
  __ j(not_zero, &has_mapped_parameters, Label::kNear);

  const int kIndex = Context::SLOPPY_ARGUMENTS_MAP_INDEX;
  __ movp(r9, Operand(r9, Context::SlotOffset(kIndex)));
  __ jmp(&instantiate, Label::kNear);

  const int kAliasedIndex = Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX;
  __ bind(&has_mapped_parameters);
  __ movp(r9, Operand(r9, Context::SlotOffset(kAliasedIndex)));
  __ bind(&instantiate);

  // rax = address of new object (tagged)
  // rbx = mapped parameter count (untagged)
  // r11 = argument count (untagged)
  // r9 = address of arguments map (tagged)
  __ movp(FieldOperand(rax, JSObject::kMapOffset), r9);
  __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister);
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), kScratchRegister);

  // Set up the callee in-object property.
  __ AssertNotSmi(rdi);
  __ movp(FieldOperand(rax, JSSloppyArgumentsObject::kCalleeOffset), rdi);

  // Use the length (smi tagged) and set that as an in-object property too.
  // Note: r11 is tagged from here on.
  __ Integer32ToSmi(r11, r11);
  __ movp(FieldOperand(rax, JSSloppyArgumentsObject::kLengthOffset), r11);

  // Set up the elements pointer in the allocated arguments object.
  // If we allocated a parameter map, rdi will point there, otherwise to the
  // backing store.
  __ leap(rdi, Operand(rax, JSSloppyArgumentsObject::kSize));
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rdi);

  // rax = address of new object (tagged)
  // rbx = mapped parameter count (untagged)
  // r11 = argument count (tagged)
  // rdi = address of parameter map or backing store (tagged)

  // Initialize parameter map. If there are no mapped arguments, we're done.
  Label skip_parameter_map;
  __ testp(rbx, rbx);
  __ j(zero, &skip_parameter_map);

  __ LoadRoot(kScratchRegister, Heap::kSloppyArgumentsElementsMapRootIndex);
  // rbx contains the untagged mapped parameter count. Add 2 and tag to write.
  __ movp(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
  __ Integer64PlusConstantToSmi(r9, rbx, 2);
  __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), r9);
  __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 0 * kPointerSize), rsi);
  __ leap(r9, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
  __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 1 * kPointerSize), r9);

  // Copy the parameter slots and the holes in the arguments.
  // We need to fill in mapped_parameter_count slots. They index the context,
  // where parameters are stored in reverse order, at
  //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
  // The mapped parameters thus need to get indices
  //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
  //       MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
  // We loop from right to left.
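  // Worked example (illustrative): with parameter_count == 3,
  // mapped_parameter_count == 2 and MIN_CONTEXT_SLOTS == 4, the loop below
  // writes map slot 1 -> context index 4 + 3 - 2 == 5, then map slot
  // 0 -> context index 4 + 3 - 1 == 6; parameter 2 stays unmapped and
  // lives only in the backing store.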
  Label parameters_loop, parameters_test;

  // Load tagged parameter count into r9.
  __ Integer32ToSmi(r9, rbx);
  __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS));
  __ addp(r8, rcx);
  __ subp(r8, r9);
  __ movp(rcx, rdi);
  __ leap(rdi, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
  __ SmiToInteger64(r9, r9);
  // r9 = loop variable (untagged)
  // r8 = mapping index (tagged)
  // rcx = address of parameter map (tagged)
  // rdi = address of backing store (tagged)
  __ jmp(&parameters_test, Label::kNear);

  __ bind(&parameters_loop);
  __ subp(r9, Immediate(1));
  __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
  __ movp(FieldOperand(rcx, r9, times_pointer_size, kParameterMapHeaderSize),
          r8);
  __ movp(FieldOperand(rdi, r9, times_pointer_size, FixedArray::kHeaderSize),
          kScratchRegister);
  __ SmiAddConstant(r8, r8, Smi::FromInt(1));
  __ bind(&parameters_test);
  __ testp(r9, r9);
  __ j(not_zero, &parameters_loop, Label::kNear);

  __ bind(&skip_parameter_map);

  // r11 = argument count (tagged)
  // rdi = address of backing store (tagged)
  // Copy arguments header and remaining slots (if there are any).
  __ Move(FieldOperand(rdi, FixedArray::kMapOffset),
          factory->fixed_array_map());
  __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), r11);

  Label arguments_loop, arguments_test;
  __ movp(r8, rbx);
  // Untag r11 for the loop below.
  __ SmiToInteger64(r11, r11);
  __ leap(kScratchRegister, Operand(r8, times_pointer_size, 0));
  __ subp(rdx, kScratchRegister);
  __ jmp(&arguments_test, Label::kNear);

  __ bind(&arguments_loop);
  __ subp(rdx, Immediate(kPointerSize));
  __ movp(r9, Operand(rdx, 0));
  __ movp(FieldOperand(rdi, r8,
                       times_pointer_size,
                       FixedArray::kHeaderSize),
          r9);
  __ addp(r8, Immediate(1));

  __ bind(&arguments_test);
  __ cmpp(r8, r11);
  __ j(less, &arguments_loop, Label::kNear);

  // Return.
  __ ret(0);

  // Do the runtime call to allocate the arguments object.
  // r11 = argument count (untagged)
  __ bind(&runtime);
  __ Integer32ToSmi(r11, r11);
  __ PopReturnAddressTo(rax);
  __ Push(rdi);  // Push function.
  __ Push(rdx);  // Push parameters pointer.
  __ Push(r11);  // Push parameter count.
  __ PushReturnAddressFrom(rax);
  __ TailCallRuntime(Runtime::kNewSloppyArguments);
}
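
// JS-level aliasing behavior set up by the parameter map (illustrative):
//
//   function f(a) {    // sloppy mode
//     arguments[0] = 42;
//     return a;        // -> 42, arguments[0] aliases the parameter slot
//   }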


void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rdi    : function
  // -- rsi    : context
  // -- rbp    : frame pointer
  // -- rsp[0] : return address
  // -----------------------------------
  __ AssertFunction(rdi);

  // For Ignition we need to skip all possible handler/stub frames until
  // we reach the JavaScript frame for the function (similar to what the
  // runtime fallback implementation does). So make rdx point to that
  // JavaScript frame.
  {
    Label loop, loop_entry;
    __ movp(rdx, rbp);
    __ jmp(&loop_entry, Label::kNear);
    __ bind(&loop);
    __ movp(rdx, Operand(rdx, StandardFrameConstants::kCallerFPOffset));
    __ bind(&loop_entry);
    __ cmpp(rdi, Operand(rdx, StandardFrameConstants::kMarkerOffset));
    __ j(not_equal, &loop);
  }

  // Check if we have an arguments adaptor frame below the function frame.
  Label arguments_adaptor, arguments_done;
  __ movp(rbx, Operand(rdx, StandardFrameConstants::kCallerFPOffset));
  __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &arguments_adaptor, Label::kNear);
  {
    __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ LoadSharedFunctionInfoSpecialField(
        rax, rax, SharedFunctionInfo::kFormalParameterCountOffset);
    __ leap(rbx, Operand(rdx, rax, times_pointer_size,
                         StandardFrameConstants::kCallerSPOffset -
                             1 * kPointerSize));
  }
  __ jmp(&arguments_done, Label::kNear);
  __ bind(&arguments_adaptor);
  {
    __ SmiToInteger32(
        rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
    __ leap(rbx, Operand(rbx, rax, times_pointer_size,
                         StandardFrameConstants::kCallerSPOffset -
                             1 * kPointerSize));
  }
  __ bind(&arguments_done);

  // ----------- S t a t e -------------
  // -- rax    : number of arguments
  // -- rbx    : pointer to the first argument
  // -- rsi    : context
  // -- rsp[0] : return address
  // -----------------------------------

  // Allocate space for the strict arguments object plus the backing store.
  Label allocate, done_allocate;
  __ leal(rcx, Operand(rax, times_pointer_size,
                       JSStrictArgumentsObject::kSize +
                           FixedArray::kHeaderSize));
  __ Allocate(rcx, rdx, rdi, no_reg, &allocate, TAG_OBJECT);
  __ bind(&done_allocate);

  // Compute the arguments.length in rdi.
  __ Integer32ToSmi(rdi, rax);

  // Setup the elements array in rdx.
  __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
  __ movp(FieldOperand(rdx, FixedArray::kMapOffset), rcx);
  __ movp(FieldOperand(rdx, FixedArray::kLengthOffset), rdi);
  {
    Label loop, done_loop;
    __ Set(rcx, 0);
    __ bind(&loop);
    __ cmpl(rcx, rax);
    __ j(equal, &done_loop, Label::kNear);
    __ movp(kScratchRegister, Operand(rbx, 0 * kPointerSize));
    __ movp(
        FieldOperand(rdx, rcx, times_pointer_size, FixedArray::kHeaderSize),
        kScratchRegister);
    __ subp(rbx, Immediate(1 * kPointerSize));
    __ addl(rcx, Immediate(1));
    __ jmp(&loop);
    __ bind(&done_loop);
  }

  // Setup the strict arguments object in rax.
  __ leap(rax,
          Operand(rdx, rax, times_pointer_size, FixedArray::kHeaderSize));
  __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, rcx);
  __ movp(FieldOperand(rax, JSStrictArgumentsObject::kMapOffset), rcx);
  __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSStrictArgumentsObject::kPropertiesOffset), rcx);
  __ movp(FieldOperand(rax, JSStrictArgumentsObject::kElementsOffset), rdx);
  __ movp(FieldOperand(rax, JSStrictArgumentsObject::kLengthOffset), rdi);
  STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
  __ Ret();

  // Fall back to %AllocateInNewSpace.
  __ bind(&allocate);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Integer32ToSmi(rax, rax);
    __ Integer32ToSmi(rcx, rcx);
    __ Push(rax);
    __ Push(rbx);
    __ Push(rcx);
    __ CallRuntime(Runtime::kAllocateInNewSpace);
    __ movp(rdx, rax);
    __ Pop(rbx);
    __ Pop(rax);
    __ SmiToInteger32(rax, rax);
  }
  __ jmp(&done_allocate);
}
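
// Unlike the sloppy case above, strict-mode arguments objects never alias
// their parameters and carry no callee property (illustrative):
//
//   function g(a) {
//     'use strict';
//     arguments[0] = 42;
//     return a;        // -> the original value of a, no aliasing
//   }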


void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
  Register context_reg = rsi;
  Register slot_reg = rbx;
  Register result_reg = rax;
  Label slow_case;

  // Go up the context chain to the script context.
  for (int i = 0; i < depth(); ++i) {
    __ movp(rdi, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
    context_reg = rdi;
  }

  // Load the PropertyCell value at the specified slot.
  __ movp(result_reg, ContextOperand(context_reg, slot_reg));
  __ movp(result_reg, FieldOperand(result_reg, PropertyCell::kValueOffset));

  // Check that the value is not the_hole.
  __ CompareRoot(result_reg, Heap::kTheHoleValueRootIndex);
  __ j(equal, &slow_case, Label::kNear);
  __ Ret();

  // Fall back to the runtime.
  __ bind(&slow_case);
  __ Integer32ToSmi(slot_reg, slot_reg);
  __ PopReturnAddressTo(kScratchRegister);
  __ Push(slot_reg);
  __ Push(kScratchRegister);
  __ TailCallRuntime(Runtime::kLoadGlobalViaContext);
}
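
// Since depth() is a compile-time property of the stub, the loop above
// emits a fixed, fully unrolled chain of loads. In pseudo-C (illustrative
// only; the accessor names are hypothetical):
//
//   Context* ctx = rsi;
//   for (int i = 0; i < depth; ++i) ctx = ctx->previous();  // unrolled
//   result = ctx->get(slot)->value();  // slot holds a PropertyCell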


void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
  Register context_reg = rsi;
  Register slot_reg = rbx;
  Register value_reg = rax;
  Register cell_reg = r8;
  Register cell_details_reg = rdx;
  Register cell_value_reg = r9;
  Label fast_heapobject_case, fast_smi_case, slow_case;

  if (FLAG_debug_code) {
    __ CompareRoot(value_reg, Heap::kTheHoleValueRootIndex);
    __ Check(not_equal, kUnexpectedValue);
  }

  // Go up the context chain to the script context.
  for (int i = 0; i < depth(); ++i) {
    __ movp(rdi, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
    context_reg = rdi;
  }

  // Load the PropertyCell at the specified slot.
  __ movp(cell_reg, ContextOperand(context_reg, slot_reg));

  // Load PropertyDetails for the cell (actually only the cell_type, kind and
  // the READ_ONLY bit of the attributes).
  __ SmiToInteger32(cell_details_reg,
                    FieldOperand(cell_reg, PropertyCell::kDetailsOffset));
  __ andl(cell_details_reg,
          Immediate(PropertyDetails::PropertyCellTypeField::kMask |
                    PropertyDetails::KindField::kMask |
                    PropertyDetails::kAttributesReadOnlyMask));

  // Check if PropertyCell holds mutable data.
  Label not_mutable_data;
  __ cmpl(cell_details_reg,
          Immediate(PropertyDetails::PropertyCellTypeField::encode(
                        PropertyCellType::kMutable) |
                    PropertyDetails::KindField::encode(kData)));
  __ j(not_equal, &not_mutable_data);
  __ JumpIfSmi(value_reg, &fast_smi_case);
  __ bind(&fast_heapobject_case);
  __ movp(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
  __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg,
                      cell_value_reg, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  // RecordWriteField clobbers the value register, so we need to reload.
  __ movp(value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
  __ Ret();
  __ bind(&not_mutable_data);

  // Check if PropertyCell value matches the new value (relevant for Constant,
  // ConstantType and Undefined cells).
  Label not_same_value;
  __ movp(cell_value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
  __ cmpp(cell_value_reg, value_reg);
  __ j(not_equal, &not_same_value,
       FLAG_debug_code ? Label::kFar : Label::kNear);
  // Make sure the PropertyCell is not marked READ_ONLY.
  __ testl(cell_details_reg,
           Immediate(PropertyDetails::kAttributesReadOnlyMask));
  __ j(not_zero, &slow_case);
  if (FLAG_debug_code) {
    Label done;
    // This can only be true for Constant, ConstantType and Undefined cells,
    // because we never store the_hole via this stub.
    __ cmpl(cell_details_reg,
            Immediate(PropertyDetails::PropertyCellTypeField::encode(
                          PropertyCellType::kConstant) |
                      PropertyDetails::KindField::encode(kData)));
    __ j(equal, &done);
    __ cmpl(cell_details_reg,
            Immediate(PropertyDetails::PropertyCellTypeField::encode(
                          PropertyCellType::kConstantType) |
                      PropertyDetails::KindField::encode(kData)));
    __ j(equal, &done);
    __ cmpl(cell_details_reg,
            Immediate(PropertyDetails::PropertyCellTypeField::encode(
                          PropertyCellType::kUndefined) |
                      PropertyDetails::KindField::encode(kData)));
    __ Check(equal, kUnexpectedValue);
    __ bind(&done);
  }
  __ Ret();
  __ bind(&not_same_value);

  // Check if PropertyCell contains data with constant type (and is not
  // READ_ONLY).
  __ cmpl(cell_details_reg,
          Immediate(PropertyDetails::PropertyCellTypeField::encode(
                        PropertyCellType::kConstantType) |
                    PropertyDetails::KindField::encode(kData)));
  __ j(not_equal, &slow_case, Label::kNear);

  // Now either both old and new values must be SMIs or both must be heap
  // objects with the same map.
  Label value_is_heap_object;
  __ JumpIfNotSmi(value_reg, &value_is_heap_object, Label::kNear);
  __ JumpIfNotSmi(cell_value_reg, &slow_case, Label::kNear);
  // Old and new values are SMIs, no need for a write barrier here.
  __ bind(&fast_smi_case);
  __ movp(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
  __ Ret();
  __ bind(&value_is_heap_object);
  __ JumpIfSmi(cell_value_reg, &slow_case, Label::kNear);
  Register cell_value_map_reg = cell_value_reg;
  __ movp(cell_value_map_reg,
          FieldOperand(cell_value_reg, HeapObject::kMapOffset));
  __ cmpp(cell_value_map_reg, FieldOperand(value_reg, HeapObject::kMapOffset));
  __ j(equal, &fast_heapobject_case);

  // Fall back to the runtime.
  __ bind(&slow_case);
  __ Integer32ToSmi(slot_reg, slot_reg);
  __ PopReturnAddressTo(kScratchRegister);
  __ Push(slot_reg);
  __ Push(value_reg);
  __ Push(kScratchRegister);
  __ TailCallRuntime(is_strict(language_mode())
                         ? Runtime::kStoreGlobalViaContext_Strict
                         : Runtime::kStoreGlobalViaContext_Sloppy);
}


static int Offset(ExternalReference ref0, ExternalReference ref1) {
  int64_t offset = (ref0.address() - ref1.address());
  // Check that the offset fits into a 32-bit int.
  DCHECK(static_cast<int>(offset) == offset);
  return static_cast<int>(offset);
}
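
// Usage note (illustrative): Offset() lets a single base register address
// several related isolate fields. Below, the handle scope limit and level
// are reached as Operand(base_reg, kLimitOffset) and
// Operand(base_reg, kLevelOffset) relative to the "next" address.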


// Prepares the stack to put arguments (aligns and so on). The WIN64 calling
// convention requires putting the pointer to the return value slot into
// rcx (rcx must be preserved until CallApiFunctionAndReturn). Saves
// context (rsi). Clobbers rax. Allocates arg_stack_space * kPointerSize
// inside the exit frame (not GCed) accessible via StackSpaceOperand.
static void PrepareCallApiFunction(MacroAssembler* masm, int arg_stack_space) {
  __ EnterApiExitFrame(arg_stack_space);
}


// Calls an API function. Allocates HandleScope, extracts returned value
// from handle and propagates exceptions. Clobbers r14, r15, rbx and
// caller-save registers. Restores context. On return removes
// stack_space * kPointerSize (GCed).
static void CallApiFunctionAndReturn(MacroAssembler* masm,
                                     Register function_address,
                                     ExternalReference thunk_ref,
                                     Register thunk_last_arg, int stack_space,
                                     Operand* stack_space_operand,
                                     Operand return_value_operand,
                                     Operand* context_restore_operand) {
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  Label write_back;

  Isolate* isolate = masm->isolate();
  Factory* factory = isolate->factory();
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate);
  const int kNextOffset = 0;
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(isolate), next_address);
  const int kLevelOffset = Offset(
      ExternalReference::handle_scope_level_address(isolate), next_address);
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate);

  DCHECK(rdx.is(function_address) || r8.is(function_address));
  // Allocate HandleScope in callee-save registers.
  Register prev_next_address_reg = r14;
  Register prev_limit_reg = rbx;
  Register base_reg = r15;
  __ Move(base_reg, next_address);
  __ movp(prev_next_address_reg, Operand(base_reg, kNextOffset));
  __ movp(prev_limit_reg, Operand(base_reg, kLimitOffset));
  __ addl(Operand(base_reg, kLevelOffset), Immediate(1));

  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1);
    __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
    __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
                     1);
    __ PopSafepointRegisters();
  }

  Label profiler_disabled;
  Label end_profiler_check;
  __ Move(rax, ExternalReference::is_profiling_address(isolate));
  __ cmpb(Operand(rax, 0), Immediate(0));
  __ j(zero, &profiler_disabled);

  // Third parameter is the address of the actual getter function.
  __ Move(thunk_last_arg, function_address);
  __ Move(rax, thunk_ref);
  __ jmp(&end_profiler_check);

  __ bind(&profiler_disabled);
  // The profiler is disabled, so call the api function directly.
  __ Move(rax, function_address);

  __ bind(&end_profiler_check);

  // Call the api function!
  __ call(rax);

  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1);
    __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
    __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
                     1);
    __ PopSafepointRegisters();
  }

  // Load the value from ReturnValue.
  __ movp(rax, return_value_operand);
  __ bind(&prologue);

  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  __ subl(Operand(base_reg, kLevelOffset), Immediate(1));
  __ movp(Operand(base_reg, kNextOffset), prev_next_address_reg);
  __ cmpp(prev_limit_reg, Operand(base_reg, kLimitOffset));
  __ j(not_equal, &delete_allocated_handles);

  // Leave the API exit frame.
  __ bind(&leave_exit_frame);
  bool restore_context = context_restore_operand != NULL;
  if (restore_context) {
    __ movp(rsi, *context_restore_operand);
  }
  if (stack_space_operand != nullptr) {
    __ movp(rbx, *stack_space_operand);
  }
  __ LeaveApiExitFrame(!restore_context);

  // Check if the function scheduled an exception.
  __ Move(rdi, scheduled_exception_address);
  __ Cmp(Operand(rdi, 0), factory->the_hole_value());
  __ j(not_equal, &promote_scheduled_exception);

#if DEBUG
  // Check if the function returned a valid JavaScript value.
  Label ok;
  Register return_value = rax;
  Register map = rcx;

  __ JumpIfSmi(return_value, &ok, Label::kNear);
  __ movp(map, FieldOperand(return_value, HeapObject::kMapOffset));

  __ CmpInstanceType(map, LAST_NAME_TYPE);
  __ j(below_equal, &ok, Label::kNear);

  __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
  __ j(above_equal, &ok, Label::kNear);

  __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, Heap::kUndefinedValueRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, Heap::kTrueValueRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, Heap::kFalseValueRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, Heap::kNullValueRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ Abort(kAPICallReturnedInvalidObject);

  __ bind(&ok);
#endif

  if (stack_space_operand != nullptr) {
    DCHECK_EQ(stack_space, 0);
    __ PopReturnAddressTo(rcx);
    __ addq(rsp, rbx);
    __ jmp(rcx);
  } else {
    __ ret(stack_space * kPointerSize);
  }

  // Re-throw by promoting a scheduled exception.
  __ bind(&promote_scheduled_exception);
  __ TailCallRuntime(Runtime::kPromoteScheduledException);

  // HandleScope limit has changed. Delete allocated extensions.
  __ bind(&delete_allocated_handles);
  __ movp(Operand(base_reg, kLimitOffset), prev_limit_reg);
  __ movp(prev_limit_reg, rax);
  __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
  __ LoadAddress(rax,
                 ExternalReference::delete_handle_scope_extensions(isolate));
  __ call(rax);
  __ movp(rax, prev_limit_reg);
  __ jmp(&leave_exit_frame);
}
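
// The HandleScope bookkeeping above follows this protocol (illustrative
// pseudo-C; next/limit/level live in the isolate's HandleScope data):
//
//   prev_next = *next; prev_limit = *limit; ++*level;  // open scope
//   result = api_function(...);                        // may create handles
//   --*level; *next = prev_next;                       // close scope
//   if (*limit != prev_limit) DeleteExtensions();      // blocks were grown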

static void CallApiFunctionStubHelper(MacroAssembler* masm,
                                      const ParameterCount& argc,
                                      bool return_first_arg,
                                      bool call_data_undefined, bool is_lazy) {
  // ----------- S t a t e -------------
  // -- rdi                 : callee
  // -- rbx                 : call_data
  // -- rcx                 : holder
  // -- rdx                 : api_function_address
  // -- rsi                 : context
  // -- rax                 : number of arguments if argc is a register
  // -- rsp[0]              : return address
  // -- rsp[8]              : last argument
  // -- ...
  // -- rsp[argc * 8]       : first argument
  // -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  Register callee = rdi;
  Register call_data = rbx;
  Register holder = rcx;
  Register api_function_address = rdx;
  Register context = rsi;
  Register return_address = r8;

  typedef FunctionCallbackArguments FCA;

  STATIC_ASSERT(FCA::kContextSaveIndex == 6);
  STATIC_ASSERT(FCA::kCalleeIndex == 5);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kHolderIndex == 0);
  STATIC_ASSERT(FCA::kArgsLength == 7);

  DCHECK(argc.is_immediate() || rax.is(argc.reg()));

  __ PopReturnAddressTo(return_address);

  // context save
  __ Push(context);

  // callee
  __ Push(callee);

  // call data
  __ Push(call_data);
  Register scratch = call_data;
  if (!call_data_undefined) {
    __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  }
  // return value
  __ Push(scratch);
  // return value default
  __ Push(scratch);
  // isolate
  __ Move(scratch, ExternalReference::isolate_address(masm->isolate()));
  __ Push(scratch);
  // holder
  __ Push(holder);

  __ movp(scratch, rsp);
  // Push return address back on stack.
  __ PushReturnAddressFrom(return_address);
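
  // Resulting stack layout at this point (illustrative; slot indices match
  // the FCA asserts above):
  //   rsp[0]  : return address
  //   rsp[8]  : holder                (FCA::kHolderIndex == 0)
  //   rsp[16] : isolate
  //   rsp[24] : return value default
  //   rsp[32] : return value
  //   rsp[40] : call data
  //   rsp[48] : callee
  //   rsp[56] : context save          (FCA::kContextSaveIndex == 6)
  // scratch points at rsp[8], the start of the implicit args (FCA) array.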

  if (!is_lazy) {
    // load context from callee
    __ movp(context, FieldOperand(callee, JSFunction::kContextOffset));
  }

  // Allocate the FunctionCallbackInfo structure in the arguments' space since
  // it's not controlled by GC.
  const int kApiStackSpace = 4;

  PrepareCallApiFunction(masm, kApiStackSpace);

  // FunctionCallbackInfo::implicit_args_.
  __ movp(StackSpaceOperand(0), scratch);
  if (argc.is_immediate()) {
    __ addp(scratch, Immediate((argc.immediate() + FCA::kArgsLength - 1) *
                               kPointerSize));
    // FunctionCallbackInfo::values_.
    __ movp(StackSpaceOperand(1), scratch);
    // FunctionCallbackInfo::length_.
    __ Set(StackSpaceOperand(2), argc.immediate());
    // FunctionCallbackInfo::is_construct_call_.
    __ Set(StackSpaceOperand(3), 0);
  } else {
    __ leap(scratch, Operand(scratch, argc.reg(), times_pointer_size,
                             (FCA::kArgsLength - 1) * kPointerSize));
    // FunctionCallbackInfo::values_.
    __ movp(StackSpaceOperand(1), scratch);
    // FunctionCallbackInfo::length_.
    __ movp(StackSpaceOperand(2), argc.reg());
    // FunctionCallbackInfo::is_construct_call_.
    __ leap(argc.reg(), Operand(argc.reg(), times_pointer_size,
                                (FCA::kArgsLength + 1) * kPointerSize));
    __ movp(StackSpaceOperand(3), argc.reg());
  }

#if defined(__MINGW64__) || defined(_WIN64)
  Register arguments_arg = rcx;
  Register callback_arg = rdx;
#else
  Register arguments_arg = rdi;
  Register callback_arg = rsi;
#endif

  // It's okay if api_function_address == callback_arg
  // but not arguments_arg.
  DCHECK(!api_function_address.is(arguments_arg));

  // v8::InvocationCallback's argument.
  __ leap(arguments_arg, StackSpaceOperand(0));

  ExternalReference thunk_ref =
      ExternalReference::invoke_function_callback(masm->isolate());

  // Accessor for FunctionCallbackInfo and first js arg.
  StackArgumentsAccessor args_from_rbp(rbp, FCA::kArgsLength + 1,
                                       ARGUMENTS_DONT_CONTAIN_RECEIVER);
  Operand context_restore_operand = args_from_rbp.GetArgumentOperand(
      FCA::kArgsLength - FCA::kContextSaveIndex);
  Operand is_construct_call_operand = StackSpaceOperand(3);
  Operand return_value_operand = args_from_rbp.GetArgumentOperand(
      return_first_arg ? 0 : FCA::kArgsLength - FCA::kReturnValueOffset);
  int stack_space = 0;
  Operand* stack_space_operand = &is_construct_call_operand;
  if (argc.is_immediate()) {
    stack_space = argc.immediate() + FCA::kArgsLength + 1;
    stack_space_operand = nullptr;
  }
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, callback_arg,
                           stack_space, stack_space_operand,
                           return_value_operand, &context_restore_operand);
}


void CallApiFunctionStub::Generate(MacroAssembler* masm) {
  bool call_data_undefined = this->call_data_undefined();
  CallApiFunctionStubHelper(masm, ParameterCount(rax), false,
                            call_data_undefined, false);
}


void CallApiAccessorStub::Generate(MacroAssembler* masm) {
  bool is_store = this->is_store();
  int argc = this->argc();
  bool call_data_undefined = this->call_data_undefined();
  bool is_lazy = this->is_lazy();
  CallApiFunctionStubHelper(masm, ParameterCount(argc), is_store,
                            call_data_undefined, is_lazy);
}


void CallApiGetterStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rsp[0]                          : return address
  // -- rsp[8]                          : name
  // -- rsp[16 .. (16 + kArgsLength*8)] : v8::PropertyCallbackInfo::args_
  // -- ...
  // -- r8                              : api_function_address
  // -----------------------------------

#if defined(__MINGW64__) || defined(_WIN64)
  Register getter_arg = r8;
  Register accessor_info_arg = rdx;
  Register name_arg = rcx;
#else
  Register getter_arg = rdx;
  Register accessor_info_arg = rsi;
  Register name_arg = rdi;
#endif
  Register api_function_address = ApiGetterDescriptor::function_address();
  DCHECK(api_function_address.is(r8));
  Register scratch = rax;

  // v8::PropertyCallbackInfo::args_ array and name handle.
  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;

  // Allocate v8::PropertyCallbackInfo in non-GCed stack space.
  const int kArgStackSpace = 1;

  // Load the address of the v8::PropertyCallbackInfo::args_ array.
  __ leap(scratch, Operand(rsp, 2 * kPointerSize));

  PrepareCallApiFunction(masm, kArgStackSpace);
  // Create a v8::PropertyCallbackInfo object on the stack and initialize
  // its args_ field.
  Operand info_object = StackSpaceOperand(0);
  __ movp(info_object, scratch);

  __ leap(name_arg, Operand(scratch, -kPointerSize));
  // The context register (rsi) has been saved in PrepareCallApiFunction and
  // could be used to pass arguments.
  __ leap(accessor_info_arg, info_object);

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback(isolate());

  // It's okay if api_function_address == getter_arg
  // but not accessor_info_arg or name_arg.
  DCHECK(!api_function_address.is(accessor_info_arg) &&
         !api_function_address.is(name_arg));

  // +3 is to skip the prologue, return address and name handle.
  Operand return_value_operand(
      rbp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg,
                           kStackUnwindSpace, nullptr, return_value_operand,
                           NULL);
}


#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X64