// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X64

#include "src/code-stubs.h"
#include "src/api-arguments.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"
#include "src/isolate.h"
#include "src/regexp/jsregexp.h"
#include "src/regexp/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"
#include "src/x64/code-stubs-x64.h"

namespace v8 {
namespace internal {


static void InitializeArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(rax, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  }
}


static void InitializeInternalArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kInternalArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(rax, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  }
}


void ArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 0);
}


void ArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void ArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, -1);
}


void InternalArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 0);
}

void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
  Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
  descriptor->Initialize(rax, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}

void InternalArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void InternalArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, -1);
}


#define __ ACCESS_MASM(masm)


void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
                                               ExternalReference miss) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
  int param_count = descriptor.GetRegisterParameterCount();
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    DCHECK(param_count == 0 ||
           rax.is(descriptor.GetRegisterParameter(param_count - 1)));
    // Push arguments
    for (int i = 0; i < param_count; ++i) {
      __ Push(descriptor.GetRegisterParameter(i));
    }
    __ CallExternalReference(miss, param_count);
  }

  __ Ret();
}


void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  __ PushCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs);
  const int argument_count = 1;
  __ PrepareCallCFunction(argument_count);
  __ LoadAddress(arg_reg_1,
                 ExternalReference::isolate_address(isolate()));

  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  __ PopCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs);
  __ ret(0);
}


class FloatingPointHelper : public AllStatic {
 public:
  enum ConvertUndefined {
    CONVERT_UNDEFINED_TO_ZERO,
    BAILOUT_ON_UNDEFINED
  };
  // Load the operands from rdx and rax into xmm0 and xmm1, as doubles.
  // If the operands are not both numbers, jump to not_numbers.
  // Leaves rdx and rax unchanged. SmiOperands assumes both are smis.
  // NumberOperands assumes both are smis or heap numbers.
  static void LoadSSE2UnknownOperands(MacroAssembler* masm,
                                      Label* not_numbers);
};


void DoubleToIStub::Generate(MacroAssembler* masm) {
  Register input_reg = this->source();
  Register final_result_reg = this->destination();
  DCHECK(is_truncating());

  Label check_negative, process_64_bits, done;

  int double_offset = offset();

  // Account for return address and saved regs if input is rsp.
  if (input_reg.is(rsp)) double_offset += 3 * kRegisterSize;
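  // (The three register slots are the return address plus the pushq's of
  // scratch1 and save_reg below, all of which sit between rsp and the input
  // by the time the double is read.)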

  MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
  MemOperand exponent_operand(MemOperand(input_reg,
                                         double_offset + kDoubleSize / 2));

  Register scratch1;
  Register scratch_candidates[3] = { rbx, rdx, rdi };
  for (int i = 0; i < 3; i++) {
    scratch1 = scratch_candidates[i];
    if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
  }

  // Since we must use rcx for shifts below, use some other register (rax)
  // to calculate the result if ecx is the requested return register.
  Register result_reg = final_result_reg.is(rcx) ? rax : final_result_reg;
  // Save ecx if it isn't the return register and therefore volatile, or if it
  // is the return register, then save the temp register we use in its stead
  // for the result.
  Register save_reg = final_result_reg.is(rcx) ? rax : rcx;
  __ pushq(scratch1);
  __ pushq(save_reg);

  bool stash_exponent_copy = !input_reg.is(rsp);
  __ movl(scratch1, mantissa_operand);
  __ Movsd(xmm0, mantissa_operand);
  __ movl(rcx, exponent_operand);
  if (stash_exponent_copy) __ pushq(rcx);

  __ andl(rcx, Immediate(HeapNumber::kExponentMask));
  __ shrl(rcx, Immediate(HeapNumber::kExponentShift));
  __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias));
  __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits);

  // Result is entirely in lower 32-bits of mantissa
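  // The left-shift amount (exponent - kPhysicalSignificandSize) is computed
  // from the still-biased exponent in rcx, hence the delta below. Shift
  // amounts above 31 (which, under the unsigned comparison, also covers the
  // wrapped-around negative-exponent case) leave no mantissa bits in the low
  // 32-bit word, so the truncated result is zero.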
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  __ subl(rcx, Immediate(delta));
  __ xorl(result_reg, result_reg);
  __ cmpl(rcx, Immediate(31));
  __ j(above, &done);
  __ shll_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  __ Cvttsd2siq(result_reg, xmm0);
  __ jmp(&done, Label::kNear);

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ movl(result_reg, scratch1);
  __ negl(result_reg);
  if (stash_exponent_copy) {
    __ cmpl(MemOperand(rsp, 0), Immediate(0));
  } else {
    __ cmpl(exponent_operand, Immediate(0));
  }
  __ cmovl(greater, result_reg, scratch1);

  // Restore registers
  __ bind(&done);
  if (stash_exponent_copy) {
    __ addp(rsp, Immediate(kDoubleSize));
  }
  if (!final_result_reg.is(result_reg)) {
    DCHECK(final_result_reg.is(rcx));
    __ movl(final_result_reg, result_reg);
  }
  __ popq(save_reg);
  __ popq(scratch1);
  __ ret(0);
}


void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm,
                                                  Label* not_numbers) {
  Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done;
  // Load operand in rdx into xmm0, or branch to not_numbers.
  __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex);
  __ JumpIfSmi(rdx, &load_smi_rdx);
  __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);  // Argument in rdx is not a number.
  __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  // Load operand in rax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(rax, &load_smi_rax);

  __ bind(&load_nonsmi_rax);
  __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);
  __ Movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi_rdx);
  __ SmiToInteger32(kScratchRegister, rdx);
  __ Cvtlsi2sd(xmm0, kScratchRegister);
  __ JumpIfNotSmi(rax, &load_nonsmi_rax);

  __ bind(&load_smi_rax);
  __ SmiToInteger32(kScratchRegister, rax);
  __ Cvtlsi2sd(xmm1, kScratchRegister);
  __ bind(&done);
}


void MathPowStub::Generate(MacroAssembler* masm) {
  const Register exponent = MathPowTaggedDescriptor::exponent();
  DCHECK(exponent.is(rdx));
  const Register base = rax;
  const Register scratch = rcx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ movp(scratch, Immediate(1));
  __ Cvtlsi2sd(double_result, scratch);

  if (exponent_type() == ON_STACK) {
    Label base_is_smi, unpack_exponent;
    // The exponent and base are supplied as arguments on the stack.
    // This can only happen if the stub is called from non-optimized code.
    // Load input parameters from stack.
    StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movp(base, args.GetArgumentOperand(0));
    __ movp(exponent, args.GetArgumentOperand(1));
    __ JumpIfSmi(base, &base_is_smi, Label::kNear);
    __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &call_runtime);

    __ Movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset));
    __ jmp(&unpack_exponent, Label::kNear);

    __ bind(&base_is_smi);
    __ SmiToInteger32(base, base);
    __ Cvtlsi2sd(double_base, base);
    __ bind(&unpack_exponent);

    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiToInteger32(exponent, exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ CompareRoot(FieldOperand(exponent, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &call_runtime);
    __ Movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
  } else if (exponent_type() == TAGGED) {
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiToInteger32(exponent, exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ Movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type() != INTEGER) {
    Label fast_power, try_arithmetic_simplification;
    // Detect integer exponents stored as double.
    __ DoubleToI(exponent, double_exponent, double_scratch,
                 TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification,
                 &try_arithmetic_simplification,
                 &try_arithmetic_simplification);
    __ jmp(&int_exponent);

    __ bind(&try_arithmetic_simplification);
    __ Cvttsd2si(exponent, double_exponent);
    // Skip to runtime if possibly NaN (indicated by the indefinite integer).
    __ cmpl(exponent, Immediate(0x1));
    __ j(overflow, &call_runtime);

    if (exponent_type() == ON_STACK) {
      // Detect square root case. Crankshaft detects constant +/-0.5 at
      // compile time and uses DoMathPowHalf instead. We then skip this check
      // for non-constant cases of +/-0.5 as these hardly occur.
      Label continue_sqrt, continue_rsqrt, not_plus_half;
      // Test for 0.5.
      // Load double_scratch with 0.5.
      __ movq(scratch, V8_UINT64_C(0x3FE0000000000000));
      __ Movq(double_scratch, scratch);
      // Already ruled out NaNs for exponent.
      __ Ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &not_plus_half, Label::kNear);

      // Calculates square root of base. Check for the special case of
      // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
      // According to IEEE-754, double-precision -Infinity has the highest
      // 12 bits set and the lowest 52 bits cleared.
      __ movq(scratch, V8_UINT64_C(0xFFF0000000000000));
      __ Movq(double_scratch, scratch);
      __ Ucomisd(double_scratch, double_base);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal. However, it also sets the carry flag.
      __ j(not_equal, &continue_sqrt, Label::kNear);
      __ j(carry, &continue_sqrt, Label::kNear);

      // Set result to Infinity in the special case.
      __ Xorpd(double_result, double_result);
      __ Subsd(double_result, double_scratch);
      __ jmp(&done);

      __ bind(&continue_sqrt);
      // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
      __ Xorpd(double_scratch, double_scratch);
      __ Addsd(double_scratch, double_base);  // Convert -0 to 0.
      __ Sqrtsd(double_result, double_scratch);
      __ jmp(&done);

      // Test for -0.5.
      __ bind(&not_plus_half);
376 // Load double_scratch with -0.5 by substracting 1.
      __ Subsd(double_scratch, double_result);
      // Already ruled out NaNs for exponent.
      __ Ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &fast_power, Label::kNear);

      // Calculates reciprocal of square root of base. Check for the special
      // case of Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
      // According to IEEE-754, double-precision -Infinity has the highest
      // 12 bits set and the lowest 52 bits cleared.
      __ movq(scratch, V8_UINT64_C(0xFFF0000000000000));
      __ Movq(double_scratch, scratch);
      __ Ucomisd(double_scratch, double_base);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal. However, it also sets the carry flag.
      __ j(not_equal, &continue_rsqrt, Label::kNear);
      __ j(carry, &continue_rsqrt, Label::kNear);

      // Set result to 0 in the special case.
      __ Xorpd(double_result, double_result);
      __ jmp(&done);

      __ bind(&continue_rsqrt);
      // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
      __ Xorpd(double_exponent, double_exponent);
      __ Addsd(double_exponent, double_base);  // Convert -0 to +0.
      __ Sqrtsd(double_exponent, double_exponent);
      __ Divsd(double_result, double_exponent);
      __ jmp(&done);
    }

    // Using FPU instructions to calculate power.
    Label fast_power_failed;
    __ bind(&fast_power);
    __ fnclex();  // Clear flags to catch exceptions later.
    // Transfer (B)ase and (E)xponent onto the FPU register stack.
    __ subp(rsp, Immediate(kDoubleSize));
    __ Movsd(Operand(rsp, 0), double_exponent);
    __ fld_d(Operand(rsp, 0));  // E
    __ Movsd(Operand(rsp, 0), double_base);
    __ fld_d(Operand(rsp, 0));  // B, E

    // Exponent is in st(1) and base is in st(0)
    // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
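    // F2XM1 only accepts inputs in (-1, 1), so X is split into its integer
    // part rnd(X) and the remainder X - rnd(X): 2^(X - rnd(X)) is computed
    // with F2XM1 and then scaled by 2^rnd(X) with FSCALE.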
    // FYL2X calculates st(1) * log2(st(0))
    __ fyl2x();    // X
    __ fld(0);     // X, X
    __ frndint();  // rnd(X), X
    __ fsub(1);    // rnd(X), X-rnd(X)
    __ fxch(1);    // X - rnd(X), rnd(X)
    // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
    __ f2xm1();    // 2^(X-rnd(X)) - 1, rnd(X)
    __ fld1();     // 1, 2^(X-rnd(X)) - 1, rnd(X)
    __ faddp(1);   // 2^(X-rnd(X)), rnd(X)
    // FSCALE calculates st(0) * 2^st(1)
    __ fscale();   // 2^X, rnd(X)
    __ fstp(1);
    // Bail out to runtime in case of exceptions in the status word.
    __ fnstsw_ax();
    __ testb(rax, Immediate(0x5F));  // Check for all but precision exception.
    __ j(not_zero, &fast_power_failed, Label::kNear);
    __ fstp_d(Operand(rsp, 0));
    __ Movsd(double_result, Operand(rsp, 0));
    __ addp(rsp, Immediate(kDoubleSize));
    __ jmp(&done);

    __ bind(&fast_power_failed);
    __ fninit();
    __ addp(rsp, Immediate(kDoubleSize));
    __ jmp(&call_runtime);
  }

  // Calculate power with integer exponent.
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  // Back up exponent as we need to check if exponent is negative later.
  __ movp(scratch, exponent);  // Back up exponent.
  __ Movsd(double_scratch, double_base);  // Back up base.
  __ Movsd(double_scratch2, double_result);  // Load double_exponent with 1.

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ testl(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ negl(scratch);
  __ bind(&no_neg);

  __ j(zero, &while_false, Label::kNear);
  __ shrl(scratch, Immediate(1));
  // Above condition means CF==0 && ZF==0. This means that the
  // bit that has been shifted out is 0 and the result is not 0.
  __ j(above, &while_true, Label::kNear);
  __ Movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

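  // Binary exponentiation (square-and-multiply): shift |exponent| right one
  // bit per iteration, squaring double_scratch each time and folding it into
  // double_result whenever the bit shifted out (the carry flag) was set.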
  __ bind(&while_true);
  __ shrl(scratch, Immediate(1));
  __ Mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ Mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);

  __ bind(&while_false);
  // If the exponent is negative, return 1/result.
  __ testl(exponent, exponent);
  __ j(greater, &done);
  __ Divsd(double_scratch2, double_result);
  __ Movsd(double_result, double_scratch2);
  // Test whether result is zero. Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ Xorpd(double_scratch2, double_scratch2);
  __ Ucomisd(double_scratch2, double_result);
  // double_exponent aliased as double_scratch2 has already been overwritten
  // and may not have contained the exponent value in the first place when the
  // input was a smi. We reset it with exponent value before bailing out.
  __ j(not_equal, &done);
  __ Cvtlsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  if (exponent_type() == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMathPowRT);

    // The stub is called from non-optimized code, which expects the result
    // as heap number in rax.
    __ bind(&done);
    __ AllocateHeapNumber(rax, rcx, &call_runtime);
    __ Movsd(FieldOperand(rax, HeapNumber::kValueOffset), double_result);
    __ ret(2 * kPointerSize);
  } else {
    __ bind(&call_runtime);
    // Move base to the correct argument register. Exponent is already in xmm1.
    __ Movsd(xmm0, double_base);
    DCHECK(double_exponent.is(xmm1));
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(2);
      __ CallCFunction(
          ExternalReference::power_double_double_function(isolate()), 2);
    }
    // Return value is in xmm0.
    __ Movsd(double_result, xmm0);

    __ bind(&done);
    __ ret(0);
  }
}


void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  Label miss;
  Register receiver = LoadDescriptor::ReceiverRegister();
  // Ensure that the vector and slot registers won't be clobbered before
  // calling the miss handler.
  DCHECK(!AreAliased(r8, r9, LoadWithVectorDescriptor::VectorRegister(),
                     LoadDescriptor::SlotRegister()));

  NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, r8,
                                                          r9, &miss);
  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
}


void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
  // Return address is on the stack.
  Label miss;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register index = LoadDescriptor::NameRegister();
  Register scratch = rdi;
  Register result = rax;
  DCHECK(!scratch.is(receiver) && !scratch.is(index));
  DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()) &&
         result.is(LoadDescriptor::SlotRegister()));

  // StringCharAtGenerator doesn't use the result register until it's passed
  // the different miss possibilities. If it did, we would have a conflict
  // when FLAG_vector_ics is true.
  StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX,
                                          RECEIVER_IS_STRING);
  char_at_generator.GenerateFast(masm);
  __ ret(0);

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper);

  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}


void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Just jump directly to runtime if native RegExp is not selected at compile
  // time, or if the regexp entry in generated code is turned off by a runtime
  // switch or at compilation.
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec);
#else  // V8_INTERPRETED_REGEXP

  // Stack frame on entry.
  //  rsp[0]  : return address
  //  rsp[8]  : last_match_info (expected JSArray)
  //  rsp[16] : previous index
  //  rsp[24] : subject string
  //  rsp[32] : JSRegExp object

  enum RegExpExecStubArgumentIndices {
    JS_REG_EXP_OBJECT_ARGUMENT_INDEX,
    SUBJECT_STRING_ARGUMENT_INDEX,
    PREVIOUS_INDEX_ARGUMENT_INDEX,
    LAST_MATCH_INFO_ARGUMENT_INDEX,
    REG_EXP_EXEC_ARGUMENT_COUNT
  };

  StackArgumentsAccessor args(rsp, REG_EXP_EXEC_ARGUMENT_COUNT,
                              ARGUMENTS_DONT_CONTAIN_RECEIVER);
  Label runtime;
  // Ensure that a RegExp stack is allocated.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(isolate());
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(isolate());
  __ Load(kScratchRegister, address_of_regexp_stack_memory_size);
  __ testp(kScratchRegister, kScratchRegister);
  __ j(zero, &runtime);

  // Check that the first argument is a JSRegExp object.
  __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
  __ JumpIfSmi(rax, &runtime);
  __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister);
  __ j(not_equal, &runtime);

  // Check that the RegExp has been compiled (data contains a fixed array).
  __ movp(rax, FieldOperand(rax, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    Condition is_smi = masm->CheckSmi(rax);
    __ Check(NegateCondition(is_smi),
             kUnexpectedTypeForRegExpDataFixedArrayExpected);
    __ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister);
    __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
  }

  // rax: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
  __ SmiToInteger32(rbx, FieldOperand(rax, JSRegExp::kDataTagOffset));
  __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP));
  __ j(not_equal, &runtime);

  // rax: RegExp data (FixedArray)
  // Check that the number of captures fit in the static offsets vector buffer.
  __ SmiToInteger32(rdx,
                    FieldOperand(rax, JSRegExp::kIrregexpCaptureCountOffset));
  // Check (number_of_captures + 1) * 2 <= offsets vector size
  // Or     number_of_captures <= offsets vector size / 2 - 1
  STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
  __ cmpl(rdx, Immediate(Isolate::kJSRegexpStaticOffsetsVectorSize / 2 - 1));
  __ j(above, &runtime);

  // Reset offset for possibly sliced string.
  __ Set(r14, 0);
  __ movp(rdi, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
  __ JumpIfSmi(rdi, &runtime);
  __ movp(r15, rdi);  // Make a copy of the original subject string.
  // rax: RegExp data (FixedArray)
  // rdi: subject string
  // r15: subject string
  // Handle subject string according to its encoding and representation:
  // (1) Sequential two byte? If yes, go to (9).
  // (2) Sequential one byte? If yes, go to (5).
  // (3) Sequential or cons? If not, go to (6).
  // (4) Cons string. If the string is flat, replace subject with first string
  //     and go to (1). Otherwise bail out to runtime.
  // (5) One byte sequential. Load regexp code for one byte.
  // (E) Carry on.
  /// [...]

  // Deferred code at the end of the stub:
  // (6) Long external string? If not, go to (10).
  // (7) External string. Make it, offset-wise, look like a sequential string.
  // (8) Is the external string one byte? If yes, go to (5).
  // (9) Two byte sequential. Load regexp code for two byte. Go to (E).
  // (10) Short external string or not a string? If yes, bail out to runtime.
  // (11) Sliced string. Replace subject with parent. Go to (1).

  Label seq_one_byte_string /* 5 */, seq_two_byte_string /* 9 */,
      external_string /* 7 */, check_underlying /* 1 */,
      not_seq_nor_cons /* 6 */, check_code /* E */, not_long_external /* 10 */;

  __ bind(&check_underlying);
  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));

  // (1) Sequential two byte? If yes, go to (9).
  __ andb(rbx, Immediate(kIsNotStringMask |
                         kStringRepresentationMask |
                         kStringEncodingMask |
                         kShortExternalStringMask));
  STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);  // Go to (9).

  // (2) Sequential one byte? If yes, go to (5).
  // Any other sequential string must be one byte.
  __ andb(rbx, Immediate(kIsNotStringMask |
                         kStringRepresentationMask |
                         kShortExternalStringMask));
  __ j(zero, &seq_one_byte_string, Label::kNear);  // Go to (5).

  // (3) Sequential or cons? If not, go to (6).
  // We check whether the subject string is a cons, since sequential strings
  // have already been covered.
  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
  STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
  STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
  __ cmpp(rbx, Immediate(kExternalStringTag));
  __ j(greater_equal, &not_seq_nor_cons);  // Go to (6).

  // (4) Cons string. Check that it's flat.
  // Replace subject with first string and reload instance type.
  __ CompareRoot(FieldOperand(rdi, ConsString::kSecondOffset),
                 Heap::kempty_stringRootIndex);
  __ j(not_equal, &runtime);
  __ movp(rdi, FieldOperand(rdi, ConsString::kFirstOffset));
  __ jmp(&check_underlying);

  // (5) One byte sequential. Load regexp code for one byte.
  __ bind(&seq_one_byte_string);
  // rax: RegExp data (FixedArray)
  __ movp(r11, FieldOperand(rax, JSRegExp::kDataOneByteCodeOffset));
  __ Set(rcx, 1);  // Type is one byte.

  // (E) Carry on. String handling is done.
  __ bind(&check_code);
  // r11: irregexp code
  // Check that the irregexp code has been generated for the actual string
  // encoding. If it has, the field contains a code object; otherwise it
  // contains a smi (code flushing support).
  __ JumpIfSmi(r11, &runtime);

  // rdi: sequential subject string (or look-alike, external string)
  // r15: original subject string
  // rcx: encoding of subject string (1 if one_byte, 0 if two_byte);
  // r11: code
  // Load used arguments before starting to push arguments for call to native
  // RegExp code to avoid handling changing stack height.
  // We have to use r15 instead of rdi to load the length because rdi might
  // have been only made to look like a sequential string when it actually
  // is an external string.
  __ movp(rbx, args.GetArgumentOperand(PREVIOUS_INDEX_ARGUMENT_INDEX));
  __ JumpIfNotSmi(rbx, &runtime);
  __ SmiCompare(rbx, FieldOperand(r15, String::kLengthOffset));
  __ j(above_equal, &runtime);
  __ SmiToInteger64(rbx, rbx);

  // rdi: subject string
  // rbx: previous index
  // rcx: encoding of subject string (1 if one_byte 0 if two_byte);
  // r11: code
  // All checks done. Now push arguments for native regexp code.
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->regexp_entry_native(), 1);

  // Isolates: note we add an additional parameter here (isolate pointer).
  static const int kRegExpExecuteArguments = 9;
  int argument_slots_on_stack =
      masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
  __ EnterApiExitFrame(argument_slots_on_stack);

  // Argument 9: Pass current isolate address.
  __ LoadAddress(kScratchRegister,
                 ExternalReference::isolate_address(isolate()));
  __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kRegisterSize),
          kScratchRegister);

  // Argument 8: Indicate that this is a direct call from JavaScript.
  __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kRegisterSize),
          Immediate(1));

  // Argument 7: Start (high end) of backtracking stack memory area.
  __ Move(kScratchRegister, address_of_regexp_stack_memory_address);
  __ movp(r9, Operand(kScratchRegister, 0));
  __ Move(kScratchRegister, address_of_regexp_stack_memory_size);
  __ addp(r9, Operand(kScratchRegister, 0));
  __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kRegisterSize), r9);

  // Argument 6: Set the number of capture registers to zero to force global
  // regexps to behave as non-global. This does not affect non-global regexps.
  // Argument 6 is passed in r9 on Linux and on the stack on Windows.
#ifdef _WIN64
  __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kRegisterSize),
          Immediate(0));
#else
  __ Set(r9, 0);
#endif

  // Argument 5: static offsets vector buffer.
  __ LoadAddress(
      r8, ExternalReference::address_of_static_offsets_vector(isolate()));
  // Argument 5 passed in r8 on Linux and on the stack on Windows.
#ifdef _WIN64
  __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kRegisterSize), r8);
#endif

  // rdi: subject string
  // rbx: previous index
  // rcx: encoding of subject string (1 if one_byte 0 if two_byte);
  // r11: code
  // r14: slice offset
  // r15: original subject string

  // Argument 2: Previous index.
  __ movp(arg_reg_2, rbx);

  // Argument 4: End of string data
  // Argument 3: Start of string data
  Label setup_two_byte, setup_rest, got_length, length_not_from_slice;
  // Prepare start and end index of the input.
  // Load the length from the original sliced string if that is the case.
  __ addp(rbx, r14);
  __ SmiToInteger32(arg_reg_3, FieldOperand(r15, String::kLengthOffset));
  __ addp(r14, arg_reg_3);  // Using arg3 as scratch.

  // rbx: start index of the input
  // r14: end index of the input
  // r15: original subject string
  __ testb(rcx, rcx);  // Last use of rcx as encoding of subject string.
  __ j(zero, &setup_two_byte, Label::kNear);
  __ leap(arg_reg_4,
          FieldOperand(rdi, r14, times_1, SeqOneByteString::kHeaderSize));
  __ leap(arg_reg_3,
          FieldOperand(rdi, rbx, times_1, SeqOneByteString::kHeaderSize));
  __ jmp(&setup_rest, Label::kNear);
  __ bind(&setup_two_byte);
  __ leap(arg_reg_4,
          FieldOperand(rdi, r14, times_2, SeqTwoByteString::kHeaderSize));
  __ leap(arg_reg_3,
          FieldOperand(rdi, rbx, times_2, SeqTwoByteString::kHeaderSize));
  __ bind(&setup_rest);

  // Argument 1: Original subject string.
  // The original subject is in the previous stack frame. Therefore we have to
  // use rbp, which points exactly to one pointer size below the previous rsp.
  // (Because creating a new stack frame pushes the previous rbp onto the stack
  // and thereby moves up rsp by one kPointerSize.)
  __ movp(arg_reg_1, r15);

  // Locate the code entry and call it.
  __ addp(r11, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(r11);

  __ LeaveApiExitFrame(true);

  // Check the result.
  Label success;
  Label exception;
  __ cmpl(rax, Immediate(1));
  // We expect exactly one result since we force the called regexp to behave
  // as non-global.
  __ j(equal, &success, Label::kNear);
  __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION));
  __ j(equal, &exception);
  __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE));
  // If none of the above, it can only be retry.
  // Handle that in the runtime system.
  __ j(not_equal, &runtime);

  // For failure return null.
  __ LoadRoot(rax, Heap::kNullValueRootIndex);
  __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);

  // Load RegExp data.
  __ bind(&success);
  __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
  __ movp(rcx, FieldOperand(rax, JSRegExp::kDataOffset));
  __ SmiToInteger32(rax,
                    FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2.
  __ leal(rdx, Operand(rax, rax, times_1, 2));

  // rdx: Number of capture registers
  // Check that the fourth object is a JSArray object.
  __ movp(r15, args.GetArgumentOperand(LAST_MATCH_INFO_ARGUMENT_INDEX));
  __ JumpIfSmi(r15, &runtime);
  __ CmpObjectType(r15, JS_ARRAY_TYPE, kScratchRegister);
  __ j(not_equal, &runtime);
  // Check that the JSArray is in fast case.
  __ movp(rbx, FieldOperand(r15, JSArray::kElementsOffset));
  __ movp(rax, FieldOperand(rbx, HeapObject::kMapOffset));
  __ CompareRoot(rax, Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &runtime);
  // Check that the last match info has space for the capture registers and the
  // additional information. Ensure no overflow in add.
  STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset);
  __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset));
  __ subl(rax, Immediate(RegExpImpl::kLastMatchOverhead));
  __ cmpl(rdx, rax);
  __ j(greater, &runtime);

  // rbx: last_match_info backing store (FixedArray)
  // rdx: number of capture registers
  // Store the capture count.
  __ Integer32ToSmi(kScratchRegister, rdx);
  __ movp(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset),
          kScratchRegister);
  // Store last subject and last input.
  __ movp(rax, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
  __ movp(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax);
  __ movp(rcx, rax);
  __ RecordWriteField(rbx,
                      RegExpImpl::kLastSubjectOffset,
                      rax,
                      rdi,
                      kDontSaveFPRegs);
  __ movp(rax, rcx);
  __ movp(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax);
  __ RecordWriteField(rbx,
                      RegExpImpl::kLastInputOffset,
                      rax,
                      rdi,
                      kDontSaveFPRegs);

  // Get the static offsets vector filled by the native regexp code.
  __ LoadAddress(
      rcx, ExternalReference::address_of_static_offsets_vector(isolate()));

  // rbx: last_match_info backing store (FixedArray)
  // rcx: offsets vector
  // rdx: number of capture registers
  Label next_capture, done;
  // Capture register counter starts from number of capture registers and
  // counts down until wrapping after zero.
  __ bind(&next_capture);
  __ subp(rdx, Immediate(1));
  __ j(negative, &done, Label::kNear);
  // Read the value from the static offsets vector buffer and make it a smi.
  __ movl(rdi, Operand(rcx, rdx, times_int_size, 0));
  __ Integer32ToSmi(rdi, rdi);
  // Store the smi value in the last match info.
  __ movp(FieldOperand(rbx,
                       rdx,
                       times_pointer_size,
                       RegExpImpl::kFirstCaptureOffset),
          rdi);
  __ jmp(&next_capture);
  __ bind(&done);

  // Return last match info.
  __ movp(rax, r15);
  __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);

  __ bind(&exception);
  // Result must now be exception. If there is no pending exception already, a
  // stack overflow (on the backtrack stack) was detected in RegExp code, but
  // the exception has not been created yet. Handle that in the runtime system.
  // TODO(592): Rerun the RegExp to get the stack overflow exception.
  ExternalReference pending_exception_address(
      Isolate::kPendingExceptionAddress, isolate());
  Operand pending_exception_operand =
      masm->ExternalOperand(pending_exception_address, rbx);
  __ movp(rax, pending_exception_operand);
  __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
  __ cmpp(rax, rdx);
  __ j(equal, &runtime);

  // For exception, throw the exception again.
  __ TailCallRuntime(Runtime::kRegExpExecReThrow);

  // Do the runtime call to execute the regexp.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kRegExpExec);

  // Deferred code for string handling.
  // (6) Long external string? If not, go to (10).
  __ bind(&not_seq_nor_cons);
  // Compare flags are still set from (3).
  __ j(greater, &not_long_external, Label::kNear);  // Go to (10).

  // (7) External string. Short external strings have been ruled out.
  __ bind(&external_string);
  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ testb(rbx, Immediate(kIsIndirectStringMask));
    __ Assert(zero, kExternalStringExpectedButNotFound);
  }
  __ movp(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
  // Move the pointer so that offset-wise, it looks like a sequential string.
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ subp(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  STATIC_ASSERT(kTwoByteStringTag == 0);
  // (8) Is the external string one byte? If yes, go to (5).
  __ testb(rbx, Immediate(kStringEncodingMask));
  __ j(not_zero, &seq_one_byte_string);  // Go to (5).

  // rdi: subject string (flat two-byte)
  // rax: RegExp data (FixedArray)
  // (9) Two byte sequential. Load regexp code for two byte. Go to (E).
  __ bind(&seq_two_byte_string);
  __ movp(r11, FieldOperand(rax, JSRegExp::kDataUC16CodeOffset));
  __ Set(rcx, 0);  // Type is two byte.
  __ jmp(&check_code);  // Go to (E).

  // (10) Not a string or a short external string? If yes, bail out to runtime.
  __ bind(&not_long_external);
  // Catch non-string subject or short external string.
  STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
  __ testb(rbx, Immediate(kIsNotStringMask | kShortExternalStringMask));
  __ j(not_zero, &runtime);

  // (11) Sliced string. Replace subject with parent. Go to (1).
  // Load offset into r14 and replace subject string with parent.
  __ SmiToInteger32(r14, FieldOperand(rdi, SlicedString::kOffsetOffset));
  __ movp(rdi, FieldOperand(rdi, SlicedString::kParentOffset));
  __ jmp(&check_underlying);
#endif  // V8_INTERPRETED_REGEXP
}


static int NegativeComparisonResult(Condition cc) {
  DCHECK(cc != equal);
  DCHECK((cc == less) || (cc == less_equal)
      || (cc == greater) || (cc == greater_equal));
  return (cc == greater || cc == greater_equal) ? LESS : GREATER;
}


static void CheckInputType(MacroAssembler* masm, Register input,
                           CompareICState::State expected, Label* fail) {
  Label ok;
  if (expected == CompareICState::SMI) {
    __ JumpIfNotSmi(input, fail);
  } else if (expected == CompareICState::NUMBER) {
    __ JumpIfSmi(input, &ok);
    __ CompareMap(input, masm->isolate()->factory()->heap_number_map());
    __ j(not_equal, fail);
  }
  // We could be strict about internalized/non-internalized here, but as long as
  // hydrogen doesn't care, the stub doesn't have to care either.
  __ bind(&ok);
}


static void BranchIfNotInternalizedString(MacroAssembler* masm,
                                          Label* label,
                                          Register object,
                                          Register scratch) {
  __ JumpIfSmi(object, label);
  __ movp(scratch, FieldOperand(object, HeapObject::kMapOffset));
  __ movzxbp(scratch,
             FieldOperand(scratch, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ testb(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, label);
}


void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
  Label runtime_call, check_unequal_objects, done;
  Condition cc = GetCondition();
  Factory* factory = isolate()->factory();

  Label miss;
  CheckInputType(masm, rdx, left(), &miss);
  CheckInputType(masm, rax, right(), &miss);

  // Compare two smis.
  Label non_smi, smi_done;
  __ JumpIfNotBothSmi(rax, rdx, &non_smi);
1054 __ subp(rdx, rax);
1055 __ j(no_overflow, &smi_done);
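  // On signed overflow the sign bit of the difference is inverted.  NOT
  // computes -rdx - 1, which restores the correct sign without the overflow
  // risk that negating the minimum value would carry; only the sign of the
  // result matters here, not its magnitude.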
  __ notp(rdx);  // Correct sign in case of overflow. rdx cannot be 0 here.
  __ bind(&smi_done);
  __ movp(rax, rdx);
  __ ret(0);
  __ bind(&non_smi);

  // The compare stub returns a positive, negative, or zero 64-bit integer
  // value in rax, corresponding to the result of comparing the two inputs.
  // NOTICE! This code is only reached after a smi-fast-case check, so
  // it is certain that at least one operand isn't a smi.

  // Two identical objects are equal unless they are both NaN or undefined.
  {
    Label not_identical;
    __ cmpp(rax, rdx);
    __ j(not_equal, &not_identical, Label::kNear);

    if (cc != equal) {
      // Check for undefined.  undefined OP undefined is false even though
      // undefined == undefined.
      __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
      Label check_for_nan;
      __ j(not_equal, &check_for_nan, Label::kNear);
      __ Set(rax, NegativeComparisonResult(cc));
      __ ret(0);
      __ bind(&check_for_nan);
    }

    // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
    // so we do the second-best thing - test it ourselves.
    Label heap_number;
    // If it's not a heap number, then return equal for the (in)equality
    // operators.
    __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
           factory->heap_number_map());
    __ j(equal, &heap_number, Label::kNear);
    if (cc != equal) {
      __ movp(rcx, FieldOperand(rax, HeapObject::kMapOffset));
      __ movzxbl(rcx, FieldOperand(rcx, Map::kInstanceTypeOffset));
      // Call runtime on identical objects.  Otherwise return equal.
      __ cmpb(rcx, Immediate(static_cast<uint8_t>(FIRST_JS_RECEIVER_TYPE)));
      __ j(above_equal, &runtime_call, Label::kFar);
      // Call runtime on identical symbols since we need to throw a TypeError.
      __ cmpb(rcx, Immediate(static_cast<uint8_t>(SYMBOL_TYPE)));
      __ j(equal, &runtime_call, Label::kFar);
      // Call runtime on identical SIMD values since we must throw a TypeError.
      __ cmpb(rcx, Immediate(static_cast<uint8_t>(SIMD128_VALUE_TYPE)));
      __ j(equal, &runtime_call, Label::kFar);
    }
    __ Set(rax, EQUAL);
    __ ret(0);

    __ bind(&heap_number);
    // It is a heap number, so return equal if it's not NaN.
    // For NaN, return 1 for every condition except greater and
    // greater-equal.  Return -1 for them, so the comparison yields
    // false for all conditions except not-equal.
    __ Set(rax, EQUAL);
    __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
    __ Ucomisd(xmm0, xmm0);
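    // ucomisd of a value against itself sets the parity flag iff the value is
    // NaN (the comparison is unordered), so setcc(parity_even) yields 1
    // exactly for NaNs.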
    __ setcc(parity_even, rax);
    // rax is 0 for equal non-NaN heapnumbers, 1 for NaNs.
    if (cc == greater_equal || cc == greater) {
      __ negp(rax);
    }
    __ ret(0);

    __ bind(&not_identical);
  }

  if (cc == equal) {  // Both strict and non-strict.
    Label slow;  // Fallthrough label.

    // If we're doing a strict equality comparison, we don't have to do
    // type conversion, so we generate code to do fast comparison for objects
    // and oddballs. Non-smi numbers and strings still go through the usual
    // slow-case code.
    if (strict()) {
      // If either is a Smi (we know that not both are), then they can only
      // be equal if the other is a HeapNumber. If so, use the slow case.
      {
        Label not_smis;
        __ SelectNonSmi(rbx, rax, rdx, &not_smis);

        // Check if the non-smi operand is a heap number.
        __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
               factory->heap_number_map());
        // If heap number, handle it in the slow case.
        __ j(equal, &slow);
        // Return non-equal.  ebx (the lower half of rbx) is not zero.
        __ movp(rax, rbx);
        __ ret(0);

        __ bind(&not_smis);
      }

      // If either operand is a JSObject or an oddball value, then they are
      // not equal since their pointers are different.
      // There is no test for undetectability in strict equality.

      // If the first object is a JS object, we have done pointer comparison.
      STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
      Label first_non_object;
      __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
      __ j(below, &first_non_object, Label::kNear);
      // Return non-zero (rax holds a heap object pointer, which is not zero).
      Label return_not_equal;
      STATIC_ASSERT(kHeapObjectTag != 0);
      __ bind(&return_not_equal);
      __ ret(0);

      __ bind(&first_non_object);
      // Check for oddballs: true, false, null, undefined.
      __ CmpInstanceType(rcx, ODDBALL_TYPE);
      __ j(equal, &return_not_equal);

      __ CmpObjectType(rdx, FIRST_JS_RECEIVER_TYPE, rcx);
      __ j(above_equal, &return_not_equal);

      // Check for oddballs: true, false, null, undefined.
      __ CmpInstanceType(rcx, ODDBALL_TYPE);
      __ j(equal, &return_not_equal);

      // Fall through to the general case.
    }
    __ bind(&slow);
  }

  // Generate the number comparison code.
  Label non_number_comparison;
  Label unordered;
  FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison);
  __ xorl(rax, rax);
  __ xorl(rcx, rcx);
  __ Ucomisd(xmm0, xmm1);

  // Don't base result on EFLAGS when a NaN is involved.
  __ j(parity_even, &unordered, Label::kNear);
  // Return a result of -1, 0, or 1, based on EFLAGS.
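  // ucomisd sets CF/ZF as for an unsigned compare: "above" means xmm0 > xmm1
  // and "below" means xmm0 < xmm1.  rax and rcx were cleared above, so after
  // the two setcc instructions rax - rcx is 1, 0, or -1.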
  __ setcc(above, rax);
  __ setcc(below, rcx);
  __ subp(rax, rcx);
  __ ret(0);

  // If one of the numbers was NaN, then the result is always false.
  // The cc is never not-equal.
  __ bind(&unordered);
  DCHECK(cc != not_equal);
  if (cc == less || cc == less_equal) {
    __ Set(rax, 1);
  } else {
    __ Set(rax, -1);
  }
  __ ret(0);

  // The number comparison code did not provide a valid result.
  __ bind(&non_number_comparison);

  // Fast negative check for internalized-to-internalized equality.
  Label check_for_strings;
  if (cc == equal) {
    BranchIfNotInternalizedString(
        masm, &check_for_strings, rax, kScratchRegister);
    BranchIfNotInternalizedString(
        masm, &check_for_strings, rdx, kScratchRegister);

    // We've already checked for object identity, so if both operands are
    // internalized strings they aren't equal. Register rax already holds a
    // non-zero value, which indicates not equal, so just return.
    __ ret(0);
  }

  __ bind(&check_for_strings);

  __ JumpIfNotBothSequentialOneByteStrings(rdx, rax, rcx, rbx,
                                           &check_unequal_objects);

  // Inline comparison of one-byte strings.
  if (cc == equal) {
    StringHelper::GenerateFlatOneByteStringEquals(masm, rdx, rax, rcx, rbx);
  } else {
    StringHelper::GenerateCompareFlatOneByteStrings(masm, rdx, rax, rcx, rbx,
                                                    rdi, r8);
  }

#ifdef DEBUG
  __ Abort(kUnexpectedFallThroughFromStringComparison);
#endif

  __ bind(&check_unequal_objects);
  if (cc == equal && !strict()) {
    // Not strict equality.  Objects are unequal if
    // they are both JSObjects and not undetectable,
    // and their pointers are different.
    Label return_equal, return_unequal, undetectable;
    // At most one is a smi, so we can test for smi by adding the two.
    // A smi plus a heap object has the low bit set, a heap object plus
    // a heap object has the low bit clear.
    STATIC_ASSERT(kSmiTag == 0);
    STATIC_ASSERT(kSmiTagMask == 1);
    __ leap(rcx, Operand(rax, rdx, times_1, 0));
    __ testb(rcx, Immediate(kSmiTagMask));
    __ j(not_zero, &runtime_call, Label::kNear);

    __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
    __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
    __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    __ j(not_zero, &undetectable, Label::kNear);
    __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    __ j(not_zero, &return_unequal, Label::kNear);

    __ CmpInstanceType(rbx, FIRST_JS_RECEIVER_TYPE);
    __ j(below, &runtime_call, Label::kNear);
    __ CmpInstanceType(rcx, FIRST_JS_RECEIVER_TYPE);
    __ j(below, &runtime_call, Label::kNear);

    __ bind(&return_unequal);
    // Return non-equal by returning the non-zero object pointer in rax.
    __ ret(0);

    __ bind(&undetectable);
    __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    __ j(zero, &return_unequal, Label::kNear);

    // If both sides are JSReceivers, then the result is false according to
    // the HTML specification, which says that only comparisons with null or
    // undefined are affected by special casing for document.all.
    __ CmpInstanceType(rbx, ODDBALL_TYPE);
    __ j(zero, &return_equal, Label::kNear);
    __ CmpInstanceType(rcx, ODDBALL_TYPE);
    __ j(not_zero, &return_unequal, Label::kNear);

    __ bind(&return_equal);
    __ Set(rax, EQUAL);
    __ ret(0);
  }
  __ bind(&runtime_call);

  if (cc == equal) {
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(rdx);
      __ Push(rax);
      __ CallRuntime(strict() ? Runtime::kStrictEqual : Runtime::kEqual);
    }
    // Turn true into 0 and false into some non-zero value.
    STATIC_ASSERT(EQUAL == 0);
    __ LoadRoot(rdx, Heap::kTrueValueRootIndex);
    __ subp(rax, rdx);
    __ Ret();
  } else {
    // Push arguments below the return address to prepare jump to builtin.
    __ PopReturnAddressTo(rcx);
    __ Push(rdx);
    __ Push(rax);
    __ Push(Smi::FromInt(NegativeComparisonResult(cc)));
    __ PushReturnAddressFrom(rcx);
    __ TailCallRuntime(Runtime::kCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
  // rax : number of arguments to the construct function
  // rbx : feedback vector
  // rdx : slot in feedback vector (Smi)
  // rdi : the function to call
  FrameScope scope(masm, StackFrame::INTERNAL);

  // Number-of-arguments register must be smi-tagged to call out.
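  // (The GC walks the stack of the frame created above; a raw int32 could be
  // misinterpreted as a heap pointer, so the count is tagged before pushing.)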
  __ Integer32ToSmi(rax, rax);
  __ Push(rax);
  __ Push(rdi);
  __ Integer32ToSmi(rdx, rdx);
  __ Push(rdx);
  __ Push(rbx);

  __ CallStub(stub);

  __ Pop(rbx);
  __ Pop(rdx);
  __ Pop(rdi);
  __ Pop(rax);
  __ SmiToInteger32(rax, rax);
}


static void GenerateRecordCallTarget(MacroAssembler* masm) {
  // Cache the called function in a feedback vector slot.  Cache states
  // are uninitialized, monomorphic (indicated by a JSFunction), and
  // megamorphic.
  // rax : number of arguments to the construct function
  // rbx : feedback vector
  // rdx : slot in feedback vector (Smi)
  // rdi : the function to call
  Isolate* isolate = masm->isolate();
  Label initialize, done, miss, megamorphic, not_array_function,
      done_no_smi_convert;

  // Load the cache state into r11.
  __ SmiToInteger32(rdx, rdx);
  __ movp(r11,
          FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));

  // A monomorphic cache hit or an already megamorphic state: invoke the
  // function without changing the state.
  // We don't know if r11 is a WeakCell or a Symbol, but it's harmless to read
  // at this position in a symbol (see static asserts in
  // type-feedback-vector.h).
  Label check_allocation_site;
  __ cmpp(rdi, FieldOperand(r11, WeakCell::kValueOffset));
  __ j(equal, &done, Label::kFar);
  __ CompareRoot(r11, Heap::kmegamorphic_symbolRootIndex);
  __ j(equal, &done, Label::kFar);
  __ CompareRoot(FieldOperand(r11, HeapObject::kMapOffset),
                 Heap::kWeakCellMapRootIndex);
  __ j(not_equal, &check_allocation_site);

  // If the weak cell is cleared, we have a new chance to become monomorphic.
  __ CheckSmi(FieldOperand(r11, WeakCell::kValueOffset));
  __ j(equal, &initialize);
  __ jmp(&megamorphic);

  __ bind(&check_allocation_site);
  // If we came here, we need to see if we are the array function.
  // If we didn't have a matching function, and we didn't find the megamorphic
  // sentinel, then we have in the slot either some other function or an
  // AllocationSite.
  __ CompareRoot(FieldOperand(r11, 0), Heap::kAllocationSiteMapRootIndex);
  __ j(not_equal, &miss);

  // Make sure the function is the Array() function.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r11);
  __ cmpp(rdi, r11);
  __ j(not_equal, &megamorphic);
  __ jmp(&done);

  __ bind(&miss);

  // A monomorphic miss (i.e., here the cache is not uninitialized) goes
  // megamorphic.
  __ CompareRoot(r11, Heap::kuninitialized_symbolRootIndex);
  __ j(equal, &initialize);
  // MegamorphicSentinel is an immortal immovable object (undefined) so no
  // write-barrier is needed.
  __ bind(&megamorphic);
  __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
          TypeFeedbackVector::MegamorphicSentinel(isolate));
  __ jmp(&done);

  // An uninitialized cache is patched with the function, or with an
  // AllocationSite (to track the ElementsKind) if the function is the Array
  // constructor.
  __ bind(&initialize);

  // Make sure the function is the Array() function.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r11);
  __ cmpp(rdi, r11);
  __ j(not_equal, &not_array_function);

  CreateAllocationSiteStub create_stub(isolate);
  CallStubInRecordCallTarget(masm, &create_stub);
  __ jmp(&done_no_smi_convert);

  __ bind(&not_array_function);
  CreateWeakCellStub weak_cell_stub(isolate);
  CallStubInRecordCallTarget(masm, &weak_cell_stub);
  __ jmp(&done_no_smi_convert);

  __ bind(&done);
  __ Integer32ToSmi(rdx, rdx);

  __ bind(&done_no_smi_convert);
}


void CallConstructStub::Generate(MacroAssembler* masm) {
  // rax : number of arguments
  // rbx : feedback vector
  // rdx : slot in feedback vector (Smi)
  // rdi : constructor function

  Label non_function;
  // Check that the constructor is not a smi.
  __ JumpIfSmi(rdi, &non_function);
  // Check that constructor is a JSFunction.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, r11);
  __ j(not_equal, &non_function);

  GenerateRecordCallTarget(masm);

  __ SmiToInteger32(rdx, rdx);
  Label feedback_register_initialized;
  // Put the AllocationSite from the feedback vector into rbx, or undefined.
  __ movp(rbx,
          FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));
  __ CompareRoot(FieldOperand(rbx, 0), Heap::kAllocationSiteMapRootIndex);
  __ j(equal, &feedback_register_initialized, Label::kNear);
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ bind(&feedback_register_initialized);

  __ AssertUndefinedOrAllocationSite(rbx);

  // Pass new target to construct stub.
  __ movp(rdx, rdi);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset));
  __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
  __ jmp(rcx);

  __ bind(&non_function);
  __ movp(rdx, rdi);
  __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}


void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
  // rdi - function
  // rdx - slot id
  // rbx - vector
  // rcx - allocation site (loaded from vector[slot]).
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r8);
  __ cmpp(rdi, r8);
  __ j(not_equal, miss);

  __ movp(rax, Immediate(arg_count()));

  // Increment the call count for monomorphic function calls.
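  // (The call count lives in the slot immediately after the feedback element,
  // hence the extra kPointerSize in the operand below.)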
  __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size,
                                 FixedArray::kHeaderSize + kPointerSize),
                    Smi::FromInt(CallICNexus::kCallCountIncrement));

  __ movp(rbx, rcx);
  __ movp(rdx, rdi);
  ArrayConstructorStub stub(masm->isolate(), arg_count());
  __ TailCallStub(&stub);
}


void CallICStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rdi - function
  // -- rdx - slot id
  // -- rbx - vector
  // -----------------------------------
  Isolate* isolate = masm->isolate();
  Label extra_checks_or_miss, call, call_function;
  int argc = arg_count();
  StackArgumentsAccessor args(rsp, argc);
  ParameterCount actual(argc);

  // The checks. First, does rdi match the recorded monomorphic target?
  __ SmiToInteger32(rdx, rdx);
  __ movp(rcx,
          FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));

  // We don't know that we have a weak cell. We might have a private symbol
  // or an AllocationSite, but the memory is safe to examine.
  // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
  // FixedArray.
  // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
  // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
  // computed, meaning that it can't appear to be a pointer. If the low bit is
  // 0, then hash is computed, but the 0 bit prevents the field from appearing
  // to be a pointer.
  STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
  STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
                    WeakCell::kValueOffset &&
                WeakCell::kValueOffset == Symbol::kHashFieldSlot);

  __ cmpp(rdi, FieldOperand(rcx, WeakCell::kValueOffset));
  __ j(not_equal, &extra_checks_or_miss);

  // The compare above could have been a SMI/SMI comparison. Guard against
  // this convincing us that we have a monomorphic JSFunction.
  __ JumpIfSmi(rdi, &extra_checks_or_miss);

  // Increment the call count for monomorphic function calls.
  __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size,
                                 FixedArray::kHeaderSize + kPointerSize),
                    Smi::FromInt(CallICNexus::kCallCountIncrement));

  __ bind(&call_function);
  __ Set(rax, argc);
  __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
                                                    tail_call_mode()),
          RelocInfo::CODE_TARGET);

  __ bind(&extra_checks_or_miss);
  Label uninitialized, miss, not_allocation_site;

  __ Cmp(rcx, TypeFeedbackVector::MegamorphicSentinel(isolate));
  __ j(equal, &call);

  // Check if we have an allocation site.
  __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
                 Heap::kAllocationSiteMapRootIndex);
  __ j(not_equal, &not_allocation_site);

  // We have an allocation site.
  HandleArrayCase(masm, &miss);

  __ bind(&not_allocation_site);

  // The following cases attempt to handle MISS cases without going to the
  // runtime.
  if (FLAG_trace_ic) {
    __ jmp(&miss);
  }

  __ Cmp(rcx, TypeFeedbackVector::UninitializedSentinel(isolate));
  __ j(equal, &uninitialized);

  // We are going megamorphic. If the feedback is a JSFunction, it is fine
  // to handle it here. More complex cases are dealt with in the runtime.
  __ AssertNotSmi(rcx);
  __ CmpObjectType(rcx, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &miss);
  __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
          TypeFeedbackVector::MegamorphicSentinel(isolate));

  __ bind(&call);
  __ Set(rax, argc);
  __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
          RelocInfo::CODE_TARGET);

  __ bind(&uninitialized);

  // We are going monomorphic, provided we actually have a JSFunction.
  __ JumpIfSmi(rdi, &miss);

  // Goto miss case if we do not have a function.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &miss);

  // Make sure the function is not the Array() function, which requires
  // special behavior on MISS.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, rcx);
  __ cmpp(rdi, rcx);
  __ j(equal, &miss);

  // Make sure the function belongs to the same native context.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kContextOffset));
  __ movp(rcx, ContextOperand(rcx, Context::NATIVE_CONTEXT_INDEX));
  __ cmpp(rcx, NativeContextOperand());
  __ j(not_equal, &miss);

  // Initialize the call counter.
  __ Move(FieldOperand(rbx, rdx, times_pointer_size,
                       FixedArray::kHeaderSize + kPointerSize),
          Smi::FromInt(CallICNexus::kCallCountIncrement));

  // Store the function. Use a stub since we need a frame for allocation.
  // rbx - vector
  // rdx - slot (needs to be in smi form)
  // rdi - function
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    CreateWeakCellStub create_stub(isolate);

    __ Integer32ToSmi(rdx, rdx);
    __ Push(rdi);
    __ CallStub(&create_stub);
    __ Pop(rdi);
  }

  __ jmp(&call_function);

  // We are here because tracing is on or we encountered a MISS case we can't
  // handle here.
  __ bind(&miss);
  GenerateMiss(masm);

  __ jmp(&call);

  // Unreachable.
  __ int3();
}


void CallICStub::GenerateMiss(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);

  // Push the function and the feedback info (vector and slot).
  __ Push(rdi);
  __ Push(rbx);
  __ Integer32ToSmi(rdx, rdx);
  __ Push(rdx);

  // Call the entry.
  __ CallRuntime(Runtime::kCallIC_Miss);

  // Move result to rdi and exit the internal frame.
  __ movp(rdi, rax);
}


bool CEntryStub::NeedsImmovableCode() {
  return false;
}


void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  CEntryStub::GenerateAheadOfTime(isolate);
  StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
  StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
  // It is important that the store buffer overflow stubs are generated first.
  ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
  CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
  CreateWeakCellStub::GenerateAheadOfTime(isolate);
  BinaryOpICStub::GenerateAheadOfTime(isolate);
  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
  StoreFastElementStub::GenerateAheadOfTime(isolate);
  TypeofStub::GenerateAheadOfTime(isolate);
}


void CodeStub::GenerateFPStubs(Isolate* isolate) {
}


void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
  CEntryStub stub(isolate, 1, kDontSaveFPRegs);
  stub.GetCode();
  CEntryStub save_doubles(isolate, 1, kSaveFPRegs);
  save_doubles.GetCode();
}


void CEntryStub::Generate(MacroAssembler* masm) {
  // rax: number of arguments including receiver
  // rbx: pointer to C function  (C callee-saved)
  // rbp: frame pointer of calling JS frame (restored after C call)
  // rsp: stack pointer  (restored after C call)
  // rsi: current context (restored)
  //
  // If argv_in_register():
  // r15: pointer to the first argument

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

#ifdef _WIN64
  // The Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9. It requires
  // the stack to be aligned to 16 bytes. It only allows a single word to be
  // returned in register rax. Larger return sizes must be written to an
  // address passed as a hidden first argument.
  const Register kCCallArg0 = rcx;
  const Register kCCallArg1 = rdx;
  const Register kCCallArg2 = r8;
  const Register kCCallArg3 = r9;
  const int kArgExtraStackSpace = 2;
  const int kMaxRegisterResultSize = 1;
#else
  // GCC / Clang passes arguments in rdi, rsi, rdx, rcx, r8, r9. Simple results
  // are returned in rax, and a struct of two pointers is returned in rax+rdx.
  // Larger return sizes must be written to an address passed as a hidden first
  // argument.
  const Register kCCallArg0 = rdi;
  const Register kCCallArg1 = rsi;
  const Register kCCallArg2 = rdx;
  const Register kCCallArg3 = rcx;
  const int kArgExtraStackSpace = 0;
  const int kMaxRegisterResultSize = 2;
#endif  // _WIN64

  // Enter the exit frame that transitions from JavaScript to C++.
  int arg_stack_space =
      kArgExtraStackSpace +
      (result_size() <= kMaxRegisterResultSize ? 0 : result_size());
  if (argv_in_register()) {
    DCHECK(!save_doubles());
    __ EnterApiExitFrame(arg_stack_space);
    // Move argc into r14 (argv is already in r15).
    __ movp(r14, rax);
  } else {
    __ EnterExitFrame(arg_stack_space, save_doubles());
  }

  // rbx: pointer to builtin function  (C callee-saved).
  // rbp: frame pointer of exit frame  (restored after C call).
  // rsp: stack pointer (restored after C call).
  // r14: number of arguments including receiver (C callee-saved).
  // r15: argv pointer (C callee-saved).

  // Check stack alignment.
  if (FLAG_debug_code) {
    __ CheckStackAlignment();
  }

  // Call C function. The arguments object will be created by stubs declared
  // by DECLARE_RUNTIME_FUNCTION().
  if (result_size() <= kMaxRegisterResultSize) {
    // Pass a pointer to the Arguments object as the first argument.
    // Return result in single register (rax), or a register pair (rax, rdx).
    __ movp(kCCallArg0, r14);  // argc.
    __ movp(kCCallArg1, r15);  // argv.
    __ Move(kCCallArg2, ExternalReference::isolate_address(isolate()));
  } else {
    DCHECK_LE(result_size(), 3);
    // Pass a pointer to the result location as the first argument.
    __ leap(kCCallArg0, StackSpaceOperand(kArgExtraStackSpace));
    // Pass a pointer to the Arguments object as the second argument.
    __ movp(kCCallArg1, r14);  // argc.
    __ movp(kCCallArg2, r15);  // argv.
    __ Move(kCCallArg3, ExternalReference::isolate_address(isolate()));
  }
  __ call(rbx);

  if (result_size() > kMaxRegisterResultSize) {
    // Read result values stored on stack. Result is stored
    // above the two Arguments object slots on Win64.
    DCHECK_LE(result_size(), 3);
    __ movq(kReturnRegister0, StackSpaceOperand(kArgExtraStackSpace + 0));
    __ movq(kReturnRegister1, StackSpaceOperand(kArgExtraStackSpace + 1));
    if (result_size() > 2) {
      __ movq(kReturnRegister2, StackSpaceOperand(kArgExtraStackSpace + 2));
    }
  }
  // Result is in rax, rdx:rax or r8:rdx:rax - do not destroy these registers!

  // Check result for exception sentinel.
  Label exception_returned;
  __ CompareRoot(rax, Heap::kExceptionRootIndex);
  __ j(equal, &exception_returned);

  // Check that there is no pending exception, otherwise we
  // should have returned the exception sentinel.
  if (FLAG_debug_code) {
    Label okay;
    __ LoadRoot(r14, Heap::kTheHoleValueRootIndex);
    ExternalReference pending_exception_address(
        Isolate::kPendingExceptionAddress, isolate());
    Operand pending_exception_operand =
        masm->ExternalOperand(pending_exception_address);
    __ cmpp(r14, pending_exception_operand);
    __ j(equal, &okay, Label::kNear);
    __ int3();
    __ bind(&okay);
  }

  // Exit the JavaScript to C++ exit frame.
  __ LeaveExitFrame(save_doubles(), !argv_in_register());
  __ ret(0);

  // Handling of exception.
  __ bind(&exception_returned);

  ExternalReference pending_handler_context_address(
      Isolate::kPendingHandlerContextAddress, isolate());
  ExternalReference pending_handler_code_address(
      Isolate::kPendingHandlerCodeAddress, isolate());
  ExternalReference pending_handler_offset_address(
      Isolate::kPendingHandlerOffsetAddress, isolate());
  ExternalReference pending_handler_fp_address(
      Isolate::kPendingHandlerFPAddress, isolate());
  ExternalReference pending_handler_sp_address(
      Isolate::kPendingHandlerSPAddress, isolate());

  // Ask the runtime for help to determine the handler. This will set rax to
  // contain the current pending exception, don't clobber it.
  ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler,
                                 isolate());
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ movp(arg_reg_1, Immediate(0));  // argc.
    __ movp(arg_reg_2, Immediate(0));  // argv.
    __ Move(arg_reg_3, ExternalReference::isolate_address(isolate()));
    __ PrepareCallCFunction(3);
    __ CallCFunction(find_handler, 3);
  }

  // Retrieve the handler context, SP and FP.
  __ movp(rsi, masm->ExternalOperand(pending_handler_context_address));
  __ movp(rsp, masm->ExternalOperand(pending_handler_sp_address));
  __ movp(rbp, masm->ExternalOperand(pending_handler_fp_address));

  // If the handler is a JS frame, restore the context to the frame. Note that
  // the context will be set to (rsi == 0) for non-JS frames.
  Label skip;
  __ testp(rsi, rsi);
  __ j(zero, &skip, Label::kNear);
  __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
  __ bind(&skip);

  // Compute the handler entry address and jump to it.
  __ movp(rdi, masm->ExternalOperand(pending_handler_code_address));
  __ movp(rdx, masm->ExternalOperand(pending_handler_offset_address));
  __ leap(rdi, FieldOperand(rdi, rdx, times_1, Code::kHeaderSize));
  __ jmp(rdi);
}


void JSEntryStub::Generate(MacroAssembler* masm) {
  Label invoke, handler_entry, exit;
  Label not_outermost_js, not_outermost_js_2;

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  {  // NOLINT. Scope block confuses linter.
    MacroAssembler::NoRootArrayScope uninitialized_root_register(masm);
    // Set up frame.
    __ pushq(rbp);
    __ movp(rbp, rsp);

    // Push the stack frame type.
    int marker = type();
    __ Push(Smi::FromInt(marker));  // frame type marker (context slot)
    ExternalReference context_address(Isolate::kContextAddress, isolate());
    __ Load(kScratchRegister, context_address);
    __ Push(kScratchRegister);  // context
    // Save callee-saved registers (X64/X32/Win64 calling conventions).
    __ pushq(r12);
    __ pushq(r13);
    __ pushq(r14);
    __ pushq(r15);
#ifdef _WIN64
    __ pushq(rdi);  // Only callee save in Win64 ABI, argument in AMD64 ABI.
    __ pushq(rsi);  // Only callee save in Win64 ABI, argument in AMD64 ABI.
#endif
    __ pushq(rbx);

#ifdef _WIN64
    // On Win64 XMM6-XMM15 are callee-save.
    __ subp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0), xmm6);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1), xmm7);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2), xmm8);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3), xmm9);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4), xmm10);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5), xmm11);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6), xmm12);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7), xmm13);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15);
#endif

    // Set up the roots and smi constant registers.
    // Needs to be done before any further smi loads.
    __ InitializeRootRegister();
  }

  // Save copies of the top frame descriptor on the stack.
  ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
  {
    Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
    __ Push(c_entry_fp_operand);
  }

  // If this is the outermost JS call, set js_entry_sp value.
  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
  __ Load(rax, js_entry_sp);
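  // js_entry_sp is zero iff no JS entry frame is on the stack yet, i.e. this
  // is the outermost JS call.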
  __ testp(rax, rax);
  __ j(not_zero, &not_outermost_js);
  __ Push(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
  __ movp(rax, rbp);
  __ Store(js_entry_sp, rax);
  Label cont;
  __ jmp(&cont);
  __ bind(&not_outermost_js);
  __ Push(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME));
  __ bind(&cont);

  // Jump to a faked try block that does the invoke, with a faked catch
  // block that sets the pending exception.
  __ jmp(&invoke);
  __ bind(&handler_entry);
  handler_offset_ = handler_entry.pos();
  // Caught exception: Store result (exception) in the pending exception
  // field in the JSEnv and return a failure sentinel.
  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
                                      isolate());
  __ Store(pending_exception, rax);
  __ LoadRoot(rax, Heap::kExceptionRootIndex);
  __ jmp(&exit);

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  __ PushStackHandler();

  // Clear any pending exceptions.
  __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
  __ Store(pending_exception, rax);

  // Fake a receiver (NULL).
  __ Push(Immediate(0));  // receiver

  // Invoke the function by calling through JS entry trampoline builtin and
  // pop the faked function when we return. We load the address from an
  // external reference instead of inlining the call target address directly
  // in the code, because the builtin stubs may not have been generated yet
  // at the time this code is generated.
  if (type() == StackFrame::ENTRY_CONSTRUCT) {
    ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
                                      isolate());
    __ Load(rax, construct_entry);
  } else {
    ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
    __ Load(rax, entry);
  }
  __ leap(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
  __ call(kScratchRegister);

  // Unlink this frame from the handler chain.
  __ PopStackHandler();

  __ bind(&exit);
  // Check if the current stack frame is marked as the outermost JS frame.
  __ Pop(rbx);
  __ Cmp(rbx, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
  __ j(not_equal, &not_outermost_js_2);
  __ Move(kScratchRegister, js_entry_sp);
  __ movp(Operand(kScratchRegister, 0), Immediate(0));
  __ bind(&not_outermost_js_2);

  // Restore the top frame descriptor from the stack.
  { Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
    __ Pop(c_entry_fp_operand);
  }

  // Restore callee-saved registers (X64 conventions).
#ifdef _WIN64
  // On Win64 XMM6-XMM15 are callee-save.
  __ movdqu(xmm6, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0));
  __ movdqu(xmm7, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1));
  __ movdqu(xmm8, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2));
  __ movdqu(xmm9, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3));
  __ movdqu(xmm10, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4));
  __ movdqu(xmm11, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5));
  __ movdqu(xmm12, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6));
  __ movdqu(xmm13, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7));
  __ movdqu(xmm14, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8));
  __ movdqu(xmm15, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9));
  __ addp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
#endif

  __ popq(rbx);
#ifdef _WIN64
  // Callee-saved in Win64 ABI, arguments/volatile in AMD64 ABI.
  __ popq(rsi);
  __ popq(rdi);
#endif
  __ popq(r15);
  __ popq(r14);
  __ popq(r13);
  __ popq(r12);
  __ addp(rsp, Immediate(2 * kPointerSize));  // remove markers

  // Restore frame pointer and return.
  __ popq(rbp);
  __ ret(0);
}


void InstanceOfStub::Generate(MacroAssembler* masm) {
  Register const object = rdx;              // Object (lhs).
  Register const function = rax;            // Function (rhs).
  Register const object_map = rcx;          // Map of {object}.
  Register const function_map = r8;         // Map of {function}.
  Register const function_prototype = rdi;  // Prototype of {function}.

  DCHECK(object.is(InstanceOfDescriptor::LeftRegister()));
  DCHECK(function.is(InstanceOfDescriptor::RightRegister()));

  // Check if {object} is a smi.
  Label object_is_smi;
  __ JumpIfSmi(object, &object_is_smi, Label::kNear);

  // Lookup the {function} and the {object} map in the global instanceof cache.
  // Note: This is safe because we clear the global instanceof cache whenever
  // we change the prototype of any object.
  Label fast_case, slow_case;
  __ movp(object_map, FieldOperand(object, HeapObject::kMapOffset));
  __ CompareRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
  __ j(not_equal, &fast_case, Label::kNear);
  __ CompareRoot(object_map, Heap::kInstanceofCacheMapRootIndex);
  __ j(not_equal, &fast_case, Label::kNear);
  __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
  __ ret(0);

  // If {object} is a smi we can safely return false if {function} is a JS
  // function, otherwise we have to miss to the runtime and throw an exception.
  __ bind(&object_is_smi);
  __ JumpIfSmi(function, &slow_case);
  __ CmpObjectType(function, JS_FUNCTION_TYPE, function_map);
  __ j(not_equal, &slow_case);
  __ LoadRoot(rax, Heap::kFalseValueRootIndex);
  __ ret(0);

  // Fast-case: The {function} must be a valid JSFunction.
  __ bind(&fast_case);
  __ JumpIfSmi(function, &slow_case);
  __ CmpObjectType(function, JS_FUNCTION_TYPE, function_map);
  __ j(not_equal, &slow_case);

  // Go to the runtime if the function is not a constructor.
  __ testb(FieldOperand(function_map, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsConstructor));
  __ j(zero, &slow_case);

  // Ensure that {function} has an instance prototype.
  __ testb(FieldOperand(function_map, Map::kBitFieldOffset),
           Immediate(1 << Map::kHasNonInstancePrototype));
  __ j(not_zero, &slow_case);

  // Get the "prototype" (or initial map) of the {function}.
  __ movp(function_prototype,
          FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  __ AssertNotSmi(function_prototype);

  // Resolve the prototype if the {function} has an initial map. Afterwards the
  // {function_prototype} will be either the JSReceiver prototype object or the
  // hole value, which means that no instances of the {function} were created
  // so far and hence we should return false.
  Label function_prototype_valid;
  Register const function_prototype_map = kScratchRegister;
  __ CmpObjectType(function_prototype, MAP_TYPE, function_prototype_map);
  __ j(not_equal, &function_prototype_valid, Label::kNear);
  __ movp(function_prototype,
          FieldOperand(function_prototype, Map::kPrototypeOffset));
  __ bind(&function_prototype_valid);
  __ AssertNotSmi(function_prototype);

  // Update the global instanceof cache with the current {object} map and
  // {function}.  The cached answer will be set when it is known below.
  __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
  __ StoreRoot(object_map, Heap::kInstanceofCacheMapRootIndex);

  // Loop through the prototype chain looking for the {function} prototype.
  // Assume true, and change to false if not found.
  Label done, loop, fast_runtime_fallback;
  __ LoadRoot(rax, Heap::kTrueValueRootIndex);
  __ bind(&loop);

  __ testb(FieldOperand(object_map, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsAccessCheckNeeded));
  __ j(not_zero, &fast_runtime_fallback, Label::kNear);
  __ CmpInstanceType(object_map, JS_PROXY_TYPE);
  __ j(equal, &fast_runtime_fallback, Label::kNear);

  __ movp(object, FieldOperand(object_map, Map::kPrototypeOffset));
  __ cmpp(object, function_prototype);
  __ j(equal, &done, Label::kNear);
  __ CompareRoot(object, Heap::kNullValueRootIndex);
2106 __ movp(object_map, FieldOperand(object, HeapObject::kMapOffset));
2107 __ j(not_equal, &loop);
2108 __ LoadRoot(rax, Heap::kFalseValueRootIndex);
2109 __ bind(&done);
2110 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
2111 __ ret(0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002112
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002113 // Found Proxy or access check needed: Call the runtime.
2114 __ bind(&fast_runtime_fallback);
2115 __ PopReturnAddressTo(kScratchRegister);
2116 __ Push(object);
2117 __ Push(function_prototype);
2118 __ PushReturnAddressFrom(kScratchRegister);
2119 // Invalidate the instanceof cache.
2120 __ Move(rax, Smi::FromInt(0));
2121 __ StoreRoot(rax, Heap::kInstanceofCacheFunctionRootIndex);
2122 __ TailCallRuntime(Runtime::kHasInPrototypeChain);
2123
2124 // Slow-case: Call the %InstanceOf runtime function.
2125 __ bind(&slow_case);
2126 __ PopReturnAddressTo(kScratchRegister);
2127 __ Push(object);
2128 __ Push(function);
2129 __ PushReturnAddressFrom(kScratchRegister);
Ben Murdochda12d292016-06-02 14:46:10 +01002130 __ TailCallRuntime(is_es6_instanceof() ? Runtime::kOrdinaryHasInstance
2131 : Runtime::kInstanceOf);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002132}


// -------------------------------------------------------------------------
// StringCharCodeAtGenerator

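// Fast path for fetching a character code out of a string, e.g. the value
// of 'foo'.charCodeAt(1). Register contract assumed from the generator's
// fields: {object_} holds the receiver, {index_} the index, and {result_}
// receives the character code as a smi; the generator's labels are bailouts
// for non-string receivers, non-smi indices, and out-of-range indices.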
void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
  // If the receiver is a smi, trigger the non-string case.
  if (check_mode_ == RECEIVER_IS_UNKNOWN) {
    __ JumpIfSmi(object_, receiver_not_string_);

    // Fetch the instance type of the receiver into the result register.
    __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset));
    __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
    // If the receiver is not a string, trigger the non-string case.
    __ testb(result_, Immediate(kIsNotStringMask));
    __ j(not_zero, receiver_not_string_);
  }

  // If the index is non-smi, trigger the non-smi case.
  __ JumpIfNotSmi(index_, &index_not_smi_);
  __ bind(&got_smi_index_);

  // Check for index out of range.
  __ SmiCompare(index_, FieldOperand(object_, String::kLengthOffset));
  __ j(above_equal, index_out_of_range_);

  __ SmiToInteger32(index_, index_);

  StringCharLoadGenerator::Generate(
      masm, object_, index_, result_, &call_runtime_);

  __ Integer32ToSmi(result_, result_);
  __ bind(&exit_);
}


void StringCharCodeAtGenerator::GenerateSlow(
    MacroAssembler* masm, EmbedMode embed_mode,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);

  Factory* factory = masm->isolate()->factory();
  // Index is not a smi.
  __ bind(&index_not_smi_);
  // If index is a heap number, try converting it to an integer.
  __ CheckMap(index_,
              factory->heap_number_map(),
              index_not_number_,
              DONT_DO_SMI_CHECK);
  call_helper.BeforeCall(masm);
  if (embed_mode == PART_OF_IC_HANDLER) {
    __ Push(LoadWithVectorDescriptor::VectorRegister());
    __ Push(LoadDescriptor::SlotRegister());
  }
  __ Push(object_);
  __ Push(index_);  // Consumed by runtime conversion function.
  if (index_flags_ == STRING_INDEX_IS_NUMBER) {
    __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero);
  } else {
    DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
    // NumberToSmi discards numbers that are not exact integers.
    __ CallRuntime(Runtime::kNumberToSmi);
  }
  if (!index_.is(rax)) {
    // Save the conversion result before the pop instructions below
    // have a chance to overwrite it.
    __ movp(index_, rax);
  }
  __ Pop(object_);
  if (embed_mode == PART_OF_IC_HANDLER) {
    __ Pop(LoadDescriptor::SlotRegister());
    __ Pop(LoadWithVectorDescriptor::VectorRegister());
  }
  // Reload the instance type.
  __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset));
  __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
  call_helper.AfterCall(masm);
  // If index is still not a smi, it must be out of range.
  __ JumpIfNotSmi(index_, index_out_of_range_);
  // Otherwise, return to the fast path.
  __ jmp(&got_smi_index_);

  // Call the runtime. We get here when the receiver is a string and the
  // index is a number, but the code for getting the actual character
  // is too complex (e.g., when the string needs to be flattened).
  __ bind(&call_runtime_);
  call_helper.BeforeCall(masm);
  __ Push(object_);
  __ Integer32ToSmi(index_, index_);
  __ Push(index_);
  __ CallRuntime(Runtime::kStringCharCodeAtRT);
  if (!result_.is(rax)) {
    __ movp(result_, rax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
}


// -------------------------------------------------------------------------
// StringCharFromCodeGenerator

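// Fast path for the inverse direction, e.g. String.fromCharCode(111) == 'o'.
// Character codes up to String::kMaxOneByteCharCode are served from the
// single-character string cache; anything larger, or a cache hole
// (undefined), falls through to the slow case.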
void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
  // Fast case of Heap::LookupSingleCharacterStringFromCode.
  __ JumpIfNotSmi(code_, &slow_case_);
  __ SmiCompare(code_, Smi::FromInt(String::kMaxOneByteCharCode));
  __ j(above, &slow_case_);

  __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
  SmiIndex index = masm->SmiToIndex(kScratchRegister, code_, kPointerSizeLog2);
  __ movp(result_, FieldOperand(result_, index.reg, index.scale,
                                FixedArray::kHeaderSize));
  __ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
  __ j(equal, &slow_case_);
  __ bind(&exit_);
}


void StringCharFromCodeGenerator::GenerateSlow(
    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);

  __ bind(&slow_case_);
  call_helper.BeforeCall(masm);
  __ Push(code_);
  __ CallRuntime(Runtime::kStringCharFromCode);
  if (!result_.is(rax)) {
    __ movp(result_, rax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
}


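// Copies {count} characters from {src} to {dest} byte by byte; for two-byte
// strings the count is doubled first so one loop serves both encodings.
// Roughly equivalent to the following (illustrative JavaScript only):
//
//   for (var i = 0; i < count; i++) dest[i] = src[i];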
void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
                                          Register dest,
                                          Register src,
                                          Register count,
                                          String::Encoding encoding) {
  // Nothing to do for zero characters.
  Label done;
  __ testl(count, count);
  __ j(zero, &done, Label::kNear);

  // Make count the number of bytes to copy.
  if (encoding == String::TWO_BYTE_ENCODING) {
    STATIC_ASSERT(2 == sizeof(uc16));
    __ addl(count, count);
  }

  // Copy remaining characters.
  Label loop;
  __ bind(&loop);
  __ movb(kScratchRegister, Operand(src, 0));
  __ movb(Operand(dest, 0), kScratchRegister);
  __ incp(src);
  __ incp(dest);
  __ decl(count);
  __ j(not_zero, &loop);

  __ bind(&done);
}

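// SubStringStub implements the common cases of e.g.
// 'abcdef'.substring(1, 4) == 'bcd' without calling the runtime: it returns
// the unchanged string when the range covers it entirely, materializes a
// SlicedString pointing into the parent for long enough substrings, and
// otherwise copies the characters into a fresh sequential string. Exotic
// inputs (short external strings, invalid indices) go to Runtime::kSubString.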
void SubStringStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  //  rsp[0]  : return address
  //  rsp[8]  : to
  //  rsp[16] : from
  //  rsp[24] : string

  enum SubStringStubArgumentIndices {
    STRING_ARGUMENT_INDEX,
    FROM_ARGUMENT_INDEX,
    TO_ARGUMENT_INDEX,
    SUB_STRING_ARGUMENT_COUNT
  };

  StackArgumentsAccessor args(rsp, SUB_STRING_ARGUMENT_COUNT,
                              ARGUMENTS_DONT_CONTAIN_RECEIVER);

  // Make sure first argument is a string.
  __ movp(rax, args.GetArgumentOperand(STRING_ARGUMENT_INDEX));
  STATIC_ASSERT(kSmiTag == 0);
  __ testl(rax, Immediate(kSmiTagMask));
  __ j(zero, &runtime);
  Condition is_string = masm->IsObjectStringType(rax, rbx, rbx);
  __ j(NegateCondition(is_string), &runtime);

  // rax: string
  // rbx: instance type
  // Calculate length of sub string using the smi values.
  __ movp(rcx, args.GetArgumentOperand(TO_ARGUMENT_INDEX));
  __ movp(rdx, args.GetArgumentOperand(FROM_ARGUMENT_INDEX));
  __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime);

  __ SmiSub(rcx, rcx, rdx);  // Overflow doesn't happen.
  __ cmpp(rcx, FieldOperand(rax, String::kLengthOffset));
  Label not_original_string;
  // Shorter than original string's length: an actual substring.
  __ j(below, &not_original_string, Label::kNear);
  // Longer than original string's length or negative: unsafe arguments.
  __ j(above, &runtime);
  // Return original string.
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
  __ bind(&not_original_string);

  Label single_char;
  __ SmiCompare(rcx, Smi::FromInt(1));
  __ j(equal, &single_char);

  __ SmiToInteger32(rcx, rcx);

  // rax: string
  // rbx: instance type
  // rcx: sub string length
  // rdx: from index (smi)
  // Deal with different string types: update the index if necessary
  // and put the underlying string into rdi.
  Label underlying_unpacked, sliced_string, seq_or_external_string;
  // If the string is not indirect, it can only be sequential or external.
  STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
  STATIC_ASSERT(kIsIndirectStringMask != 0);
  __ testb(rbx, Immediate(kIsIndirectStringMask));
  __ j(zero, &seq_or_external_string, Label::kNear);

  __ testb(rbx, Immediate(kSlicedNotConsMask));
  __ j(not_zero, &sliced_string, Label::kNear);
  // Cons string. Check whether it is flat, then fetch first part.
  // Flat cons strings have an empty second part.
  __ CompareRoot(FieldOperand(rax, ConsString::kSecondOffset),
                 Heap::kempty_stringRootIndex);
  __ j(not_equal, &runtime);
  __ movp(rdi, FieldOperand(rax, ConsString::kFirstOffset));
  // Update instance type.
  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
  __ jmp(&underlying_unpacked, Label::kNear);

  __ bind(&sliced_string);
  // Sliced string. Fetch parent and correct start index by offset.
  __ addp(rdx, FieldOperand(rax, SlicedString::kOffsetOffset));
  __ movp(rdi, FieldOperand(rax, SlicedString::kParentOffset));
  // Update instance type.
  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
  __ jmp(&underlying_unpacked, Label::kNear);

  __ bind(&seq_or_external_string);
  // Sequential or external string. Just move string to the correct register.
  __ movp(rdi, rax);

  __ bind(&underlying_unpacked);

  if (FLAG_string_slices) {
    Label copy_routine;
    // rdi: underlying subject string
    // rbx: instance type of underlying subject string
    // rdx: adjusted start index (smi)
    // rcx: length
    // If coming from the make_two_character_string path, the string
    // is too short to be sliced anyway.
    __ cmpp(rcx, Immediate(SlicedString::kMinLength));
    // Short slice. Copy instead of slicing.
    __ j(less, &copy_routine);
    // Allocate new sliced string. At this point we do not reload the instance
    // type including the string encoding because we simply rely on the info
    // provided by the original string. It does not matter if the original
    // string's encoding is wrong because we always have to recheck encoding of
    // the newly created string's parent anyway due to externalized strings.
    Label two_byte_slice, set_slice_header;
    STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
    STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
    __ testb(rbx, Immediate(kStringEncodingMask));
    __ j(zero, &two_byte_slice, Label::kNear);
    __ AllocateOneByteSlicedString(rax, rbx, r14, &runtime);
    __ jmp(&set_slice_header, Label::kNear);
    __ bind(&two_byte_slice);
    __ AllocateTwoByteSlicedString(rax, rbx, r14, &runtime);
    __ bind(&set_slice_header);
    __ Integer32ToSmi(rcx, rcx);
    __ movp(FieldOperand(rax, SlicedString::kLengthOffset), rcx);
    __ movp(FieldOperand(rax, SlicedString::kHashFieldOffset),
            Immediate(String::kEmptyHashField));
    __ movp(FieldOperand(rax, SlicedString::kParentOffset), rdi);
    __ movp(FieldOperand(rax, SlicedString::kOffsetOffset), rdx);
    __ IncrementCounter(counters->sub_string_native(), 1);
    __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);

    __ bind(&copy_routine);
  }

  // rdi: underlying subject string
  // rbx: instance type of underlying subject string
  // rdx: adjusted start index (smi)
  // rcx: length
  // The subject string can only be an external or sequential string of
  // either encoding at this point.
  Label two_byte_sequential, sequential_string;
  STATIC_ASSERT(kExternalStringTag != 0);
  STATIC_ASSERT(kSeqStringTag == 0);
  __ testb(rbx, Immediate(kExternalStringTag));
  __ j(zero, &sequential_string);

  // Handle external string.
  // Rule out short external strings.
  STATIC_ASSERT(kShortExternalStringTag != 0);
  __ testb(rbx, Immediate(kShortExternalStringMask));
  __ j(not_zero, &runtime);
  __ movp(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
  // Move the pointer so that offset-wise, it looks like a sequential string.
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ subp(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));

  __ bind(&sequential_string);
  STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
  __ testb(rbx, Immediate(kStringEncodingMask));
  __ j(zero, &two_byte_sequential);

  // Allocate the result.
  __ AllocateOneByteString(rax, rcx, r11, r14, r15, &runtime);

  // rax: result string
  // rcx: result string length
  {  // Locate character of sub string start.
    SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_1);
    __ leap(r14, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
                         SeqOneByteString::kHeaderSize - kHeapObjectTag));
  }
  // Locate first character of result.
  __ leap(rdi, FieldOperand(rax, SeqOneByteString::kHeaderSize));

  // rax: result string
  // rcx: result length
  // rdi: first character of result
  // r14: first character of sub string
  StringHelper::GenerateCopyCharacters(
      masm, rdi, r14, rcx, String::ONE_BYTE_ENCODING);
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);

  __ bind(&two_byte_sequential);
  // Allocate the result.
  __ AllocateTwoByteString(rax, rcx, r11, r14, r15, &runtime);

  // rax: result string
  // rcx: result string length
  {  // Locate character of sub string start.
    SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_2);
    __ leap(r14, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
                         SeqOneByteString::kHeaderSize - kHeapObjectTag));
  }
  // Locate first character of result.
  __ leap(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize));

  // rax: result string
  // rcx: result length
  // rdi: first character of result
  // r14: first character of sub string
  StringHelper::GenerateCopyCharacters(
      masm, rdi, r14, rcx, String::TWO_BYTE_ENCODING);
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);

  // Just jump to runtime to create the sub string.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kSubString);

  __ bind(&single_char);
  // rax: string
  // rbx: instance type
  // rcx: sub string length (smi)
  // rdx: from index (smi)
  StringCharAtGenerator generator(rax, rdx, rcx, rax, &runtime, &runtime,
                                  &runtime, STRING_INDEX_IS_NUMBER,
                                  RECEIVER_IS_STRING);
  generator.GenerateFast(masm);
  __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
  generator.SkipSlow(masm, &runtime);
}


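// The next three stubs form the ToNumber conversion chain: ToNumberStub
// handles smis and heap numbers inline and defers to NonNumberToNumberStub,
// which dispatches strings to StringToNumberStub and reads the cached
// to_number value off oddballs; everything else ends up in the runtime.
// Illustrative JavaScript only:
//
//   Number('42');  // string path
//   Number(true);  // oddball path, via the cached to_number value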
void ToNumberStub::Generate(MacroAssembler* masm) {
  // The ToNumber stub takes one argument in rax.
  Label not_smi;
  __ JumpIfNotSmi(rax, &not_smi, Label::kNear);
  __ Ret();
  __ bind(&not_smi);

  Label not_heap_number;
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &not_heap_number, Label::kNear);
  __ Ret();
  __ bind(&not_heap_number);

  NonNumberToNumberStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

void NonNumberToNumberStub::Generate(MacroAssembler* masm) {
  // The NonNumberToNumber stub takes one argument in rax.
  __ AssertNotNumber(rax);

  Label not_string;
  __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdi);
  // rax: object
  // rdi: object map
  __ j(above_equal, &not_string, Label::kNear);
  StringToNumberStub stub(masm->isolate());
  __ TailCallStub(&stub);
  __ bind(&not_string);

  Label not_oddball;
  __ CmpInstanceType(rdi, ODDBALL_TYPE);
  __ j(not_equal, &not_oddball, Label::kNear);
  __ movp(rax, FieldOperand(rax, Oddball::kToNumberOffset));
  __ Ret();
  __ bind(&not_oddball);

  __ PopReturnAddressTo(rcx);     // Pop return address.
  __ Push(rax);                   // Push argument.
  __ PushReturnAddressFrom(rcx);  // Push return address.
  __ TailCallRuntime(Runtime::kToNumber);
}

void StringToNumberStub::Generate(MacroAssembler* masm) {
  // The StringToNumber stub takes one argument in rax.
  __ AssertString(rax);

  // Check if string has a cached array index.
  Label runtime;
  __ testl(FieldOperand(rax, String::kHashFieldOffset),
           Immediate(String::kContainsCachedArrayIndexMask));
  __ j(not_zero, &runtime, Label::kNear);
  __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
  __ IndexFromHash(rax, rax);
  __ Ret();

  __ bind(&runtime);
  __ PopReturnAddressTo(rcx);     // Pop return address.
  __ Push(rax);                   // Push argument.
  __ PushReturnAddressFrom(rcx);  // Push return address.
  __ TailCallRuntime(Runtime::kStringToNumber);
}

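// ToStringStub and ToNameStub below share one shape: strings (respectively
// names) are returned unchanged, numbers are routed through
// NumberToStringStub, oddballs return their cached to_string value, and
// everything else is handed to the runtime.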
void ToStringStub::Generate(MacroAssembler* masm) {
  // The ToString stub takes one argument in rax.
  Label is_number;
  __ JumpIfSmi(rax, &is_number, Label::kNear);

  Label not_string;
  __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdi);
  // rax: receiver
  // rdi: receiver map
  __ j(above_equal, &not_string, Label::kNear);
  __ Ret();
  __ bind(&not_string);

  Label not_heap_number;
  __ CompareRoot(rdi, Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &not_heap_number, Label::kNear);
  __ bind(&is_number);
  NumberToStringStub stub(isolate());
  __ TailCallStub(&stub);
  __ bind(&not_heap_number);

  Label not_oddball;
  __ CmpInstanceType(rdi, ODDBALL_TYPE);
  __ j(not_equal, &not_oddball, Label::kNear);
  __ movp(rax, FieldOperand(rax, Oddball::kToStringOffset));
  __ Ret();
  __ bind(&not_oddball);

  __ PopReturnAddressTo(rcx);     // Pop return address.
  __ Push(rax);                   // Push argument.
  __ PushReturnAddressFrom(rcx);  // Push return address.
  __ TailCallRuntime(Runtime::kToString);
}


void ToNameStub::Generate(MacroAssembler* masm) {
  // The ToName stub takes one argument in rax.
  Label is_number;
  __ JumpIfSmi(rax, &is_number, Label::kNear);

  Label not_name;
  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
  __ CmpObjectType(rax, LAST_NAME_TYPE, rdi);
  // rax: receiver
  // rdi: receiver map
  __ j(above, &not_name, Label::kNear);
  __ Ret();
  __ bind(&not_name);

  Label not_heap_number;
  __ CompareRoot(rdi, Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &not_heap_number, Label::kNear);
  __ bind(&is_number);
  NumberToStringStub stub(isolate());
  __ TailCallStub(&stub);
  __ bind(&not_heap_number);

  Label not_oddball;
  __ CmpInstanceType(rdi, ODDBALL_TYPE);
  __ j(not_equal, &not_oddball, Label::kNear);
  __ movp(rax, FieldOperand(rax, Oddball::kToStringOffset));
  __ Ret();
  __ bind(&not_oddball);

  __ PopReturnAddressTo(rcx);     // Pop return address.
  __ Push(rax);                   // Push argument.
  __ PushReturnAddressFrom(rcx);  // Push return address.
  __ TailCallRuntime(Runtime::kToName);
}


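// The helpers below compare flat one-byte strings. A sketch of the equality
// variant (illustrative JavaScript only):
//
//   function equals(a, b) {
//     if (a.length !== b.length) return false;
//     for (var i = 0; i < a.length; i++) {
//       if (a[i] !== b[i]) return false;
//     }
//     return true;
//   }
//
// The ordering variant compares min(a.length, b.length) characters first
// and decides ties by the precomputed length difference.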
void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
                                                   Register left,
                                                   Register right,
                                                   Register scratch1,
                                                   Register scratch2) {
  Register length = scratch1;

  // Compare lengths.
  Label check_zero_length;
  __ movp(length, FieldOperand(left, String::kLengthOffset));
  __ SmiCompare(length, FieldOperand(right, String::kLengthOffset));
  __ j(equal, &check_zero_length, Label::kNear);
  __ Move(rax, Smi::FromInt(NOT_EQUAL));
  __ ret(0);

  // Check if the length is zero.
  Label compare_chars;
  __ bind(&check_zero_length);
  STATIC_ASSERT(kSmiTag == 0);
  __ SmiTest(length);
  __ j(not_zero, &compare_chars, Label::kNear);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  // Compare characters.
  __ bind(&compare_chars);
  Label strings_not_equal;
  GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2,
                                  &strings_not_equal, Label::kNear);

  // Characters are equal.
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  // Characters are not equal.
  __ bind(&strings_not_equal);
  __ Move(rax, Smi::FromInt(NOT_EQUAL));
  __ ret(0);
}


void StringHelper::GenerateCompareFlatOneByteStrings(
    MacroAssembler* masm, Register left, Register right, Register scratch1,
    Register scratch2, Register scratch3, Register scratch4) {
  // Ensure that you can always subtract a string length from a non-negative
  // number (e.g. another length).
  STATIC_ASSERT(String::kMaxLength < 0x7fffffff);

  // Find minimum length and length difference.
  __ movp(scratch1, FieldOperand(left, String::kLengthOffset));
  __ movp(scratch4, scratch1);
  __ SmiSub(scratch4,
            scratch4,
            FieldOperand(right, String::kLengthOffset));
  // Register scratch4 now holds left.length - right.length.
  const Register length_difference = scratch4;
  Label left_shorter;
  __ j(less, &left_shorter, Label::kNear);
  // The right string isn't longer than the left one.
  // Get the right string's length by subtracting the (non-negative) difference
  // from the left string's length.
  __ SmiSub(scratch1, scratch1, length_difference);
  __ bind(&left_shorter);
  // Register scratch1 now holds Min(left.length, right.length).
  const Register min_length = scratch1;

  Label compare_lengths;
  // If min-length is zero, go directly to comparing lengths.
  __ SmiTest(min_length);
  __ j(zero, &compare_lengths, Label::kNear);

  // Compare loop.
  Label result_not_equal;
  GenerateOneByteCharsCompareLoop(
      masm, left, right, min_length, scratch2, &result_not_equal,
      // In debug-code mode, SmiTest below might push
      // the target label outside the near range.
      Label::kFar);

  // Completed loop without finding different characters.
  // Compare lengths (precomputed).
  __ bind(&compare_lengths);
  __ SmiTest(length_difference);
  Label length_not_equal;
  __ j(not_zero, &length_not_equal, Label::kNear);

  // Result is EQUAL.
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  Label result_greater;
  Label result_less;
  __ bind(&length_not_equal);
  __ j(greater, &result_greater, Label::kNear);
  __ jmp(&result_less, Label::kNear);
  __ bind(&result_not_equal);
  // Unequal comparison of left to right, either character or length.
  __ j(above, &result_greater, Label::kNear);
  __ bind(&result_less);

  // Result is LESS.
  __ Move(rax, Smi::FromInt(LESS));
  __ ret(0);

  // Result is GREATER.
  __ bind(&result_greater);
  __ Move(rax, Smi::FromInt(GREATER));
  __ ret(0);
}


void StringHelper::GenerateOneByteCharsCompareLoop(
    MacroAssembler* masm, Register left, Register right, Register length,
    Register scratch, Label* chars_not_equal, Label::Distance near_jump) {
  // Change index to run from -length to -1 by adding length to string
  // start. This means that loop ends when index reaches zero, which
  // doesn't need an additional compare.
  __ SmiToInteger32(length, length);
  __ leap(left,
          FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
  __ leap(right,
          FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
  __ negq(length);
  Register index = length;  // index = -length;

  // Compare loop.
  Label loop;
  __ bind(&loop);
  __ movb(scratch, Operand(left, index, times_1, 0));
  __ cmpb(scratch, Operand(right, index, times_1, 0));
  __ j(not_equal, chars_not_equal, near_jump);
  __ incq(index);
  __ j(not_zero, &loop);
}


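// The undefined value loaded into rcx below is a placeholder: a concrete
// AllocationSite is expected to be patched in when this stub is
// instantiated via GetCodeCopyFromTemplate(), and the debug-code checks
// assert that the patching actually happened.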
void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdx    : left
  //  -- rax    : right
  //  -- rsp[0] : return address
  // -----------------------------------

  // Load rcx with the allocation site. We stick an undefined dummy value here
  // and replace it with the real allocation site later when we instantiate
  // this stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
  __ Move(rcx, handle(isolate()->heap()->undefined_value()));

  // Make sure that we actually patched the allocation site.
  if (FLAG_debug_code) {
    __ testb(rcx, Immediate(kSmiTagMask));
    __ Assert(not_equal, kExpectedAllocationSite);
    __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset),
           isolate()->factory()->allocation_site_map());
    __ Assert(equal, kExpectedAllocationSite);
  }

  // Tail call into the stub that handles binary operations with allocation
  // sites.
  BinaryOpWithAllocationSiteStub stub(isolate(), state());
  __ TailCallStub(&stub);
}


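// Each CompareICStub state below handles exactly one pair of operand types
// and jumps to GenerateMiss on anything else, so the IC can be re-patched
// with a more general state. The equality paths rely on the fact that only
// the sign/zero-ness of rax matters to the caller, which is why a plain
// subtraction of the operands suffices as a result.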
void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
  DCHECK_EQ(CompareICState::BOOLEAN, state());
  Label miss;
  Label::Distance const miss_distance =
      masm->emit_debug_code() ? Label::kFar : Label::kNear;

  __ JumpIfSmi(rdx, &miss, miss_distance);
  __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
  __ JumpIfSmi(rax, &miss, miss_distance);
  __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
  __ JumpIfNotRoot(rcx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
  __ JumpIfNotRoot(rbx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
  if (!Token::IsEqualityOp(op())) {
    __ movp(rax, FieldOperand(rax, Oddball::kToNumberOffset));
    __ AssertSmi(rax);
    __ movp(rdx, FieldOperand(rdx, Oddball::kToNumberOffset));
    __ AssertSmi(rdx);
    __ pushq(rax);
    __ movq(rax, rdx);
    __ popq(rdx);
  }
  __ subp(rax, rdx);
  __ Ret();

  __ bind(&miss);
  GenerateMiss(masm);
}


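// For two smis the comparison reduces to integer subtraction, roughly
// (illustrative JavaScript only):
//
//   function compareSmis(x, y) { return x - y; }  // < 0, 0, or > 0
//
// The notp below corrects the sign when the subtraction overflows, since
// then the truncated result has the wrong sign.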
void CompareICStub::GenerateSmis(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::SMI);
  Label miss;
  __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear);

  if (GetCondition() == equal) {
    // For equality we do not care about the sign of the result.
    __ subp(rax, rdx);
  } else {
    Label done;
    __ subp(rdx, rax);
    __ j(no_overflow, &done, Label::kNear);
    // Correct sign of result in case of overflow.
    __ notp(rdx);
    __ bind(&done);
    __ movp(rax, rdx);
  }
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::NUMBER);

  Label generic_stub;
  Label unordered, maybe_undefined1, maybe_undefined2;
  Label miss;

  if (left() == CompareICState::SMI) {
    __ JumpIfNotSmi(rdx, &miss);
  }
  if (right() == CompareICState::SMI) {
    __ JumpIfNotSmi(rax, &miss);
  }

  // Load left and right operand.
  Label done, left, left_smi, right_smi;
  __ JumpIfSmi(rax, &right_smi, Label::kNear);
  __ CompareMap(rax, isolate()->factory()->heap_number_map());
  __ j(not_equal, &maybe_undefined1, Label::kNear);
  __ Movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&left, Label::kNear);
  __ bind(&right_smi);
  __ SmiToInteger32(rcx, rax);  // Can't clobber rax yet.
  __ Cvtlsi2sd(xmm1, rcx);

  __ bind(&left);
  __ JumpIfSmi(rdx, &left_smi, Label::kNear);
  __ CompareMap(rdx, isolate()->factory()->heap_number_map());
  __ j(not_equal, &maybe_undefined2, Label::kNear);
  __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  __ jmp(&done);
  __ bind(&left_smi);
  __ SmiToInteger32(rcx, rdx);  // Can't clobber rdx yet.
  __ Cvtlsi2sd(xmm0, rcx);

  __ bind(&done);
  // Compare operands.
  __ Ucomisd(xmm0, xmm1);

  // Don't base result on EFLAGS when a NaN is involved.
  __ j(parity_even, &unordered, Label::kNear);

  // Return a result of -1, 0, or 1, based on EFLAGS.
  // Use mov rather than xor, which would clobber the flag register.
  __ movl(rax, Immediate(0));
  __ movl(rcx, Immediate(0));
  __ setcc(above, rax);  // Add one to zero if carry clear and not equal.
  __ sbbp(rax, rcx);     // Subtract one if below (aka. carry set).
  __ ret(0);

  __ bind(&unordered);
  __ bind(&generic_stub);
  CompareICStub stub(isolate(), op(), CompareICState::GENERIC,
                     CompareICState::GENERIC, CompareICState::GENERIC);
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);

  __ bind(&maybe_undefined1);
  if (Token::IsOrderedRelationalCompareOp(op())) {
    __ Cmp(rax, isolate()->factory()->undefined_value());
    __ j(not_equal, &miss);
    __ JumpIfSmi(rdx, &unordered);
    __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
    __ j(not_equal, &maybe_undefined2, Label::kNear);
    __ jmp(&unordered);
  }

  __ bind(&maybe_undefined2);
  if (Token::IsOrderedRelationalCompareOp(op())) {
    __ Cmp(rdx, isolate()->factory()->undefined_value());
    __ j(equal, &unordered);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::INTERNALIZED_STRING);
  DCHECK(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;

  // Check that both operands are heap objects.
  Label miss;
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss, Label::kNear);

  // Check that both operands are internalized strings.
  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ orp(tmp1, tmp2);
  __ testb(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, &miss, Label::kNear);

  // Internalized strings are compared by identity.
  Label done;
  __ cmpp(left, right);
  // Make sure rax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  DCHECK(right.is(rax));
  __ j(not_equal, &done, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::UNIQUE_NAME);
  DCHECK(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;

  // Check that both operands are heap objects.
  Label miss;
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss, Label::kNear);

  // Check that both operands are unique names. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));

  __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear);
  __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear);

  // Unique names are compared by identity.
  Label done;
  __ cmpp(left, right);
  // Make sure rax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  DCHECK(right.is(rax));
  __ j(not_equal, &done, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


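// For generic strings, equality can still short-circuit: two internalized
// strings that are not the same object are known to be different, so the
// code below only falls through to a character-wise compare (or the
// runtime) when at least one operand is not internalized.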
void CompareICStub::GenerateStrings(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::STRING);
  Label miss;

  bool equality = Token::IsEqualityOp(op());

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;
  Register tmp3 = rdi;

  // Check that both operands are heap objects.
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss);

  // Check that both operands are strings. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  __ movp(tmp3, tmp1);
  STATIC_ASSERT(kNotStringTag != 0);
  __ orp(tmp3, tmp2);
  __ testb(tmp3, Immediate(kIsNotStringMask));
  __ j(not_zero, &miss);

  // Fast check for identical strings.
  Label not_same;
  __ cmpp(left, right);
  __ j(not_equal, &not_same, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  // Handle not identical strings.
  __ bind(&not_same);

  // Check that both strings are internalized strings. If they are, we're done
  // because we already know they are not identical. We also know they are both
  // strings.
  if (equality) {
    Label do_compare;
    STATIC_ASSERT(kInternalizedTag == 0);
    __ orp(tmp1, tmp2);
    __ testb(tmp1, Immediate(kIsNotInternalizedMask));
    __ j(not_zero, &do_compare, Label::kNear);
    // Make sure rax is non-zero. At this point input operands are
    // guaranteed to be non-zero.
    DCHECK(right.is(rax));
    __ ret(0);
    __ bind(&do_compare);
  }

  // Check that both strings are sequential one-byte.
  Label runtime;
  __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime);

  // Compare flat one-byte strings. Returns when done.
  if (equality) {
    StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1,
                                                  tmp2);
  } else {
    StringHelper::GenerateCompareFlatOneByteStrings(
        masm, left, right, tmp1, tmp2, tmp3, kScratchRegister);
  }

  // Handle more complex cases in runtime.
  __ bind(&runtime);
  if (equality) {
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(left);
      __ Push(right);
      __ CallRuntime(Runtime::kStringEqual);
    }
    __ LoadRoot(rdx, Heap::kTrueValueRootIndex);
    __ subp(rax, rdx);
    __ Ret();
  } else {
    __ PopReturnAddressTo(tmp1);
    __ Push(left);
    __ Push(right);
    __ PushReturnAddressFrom(tmp1);
    __ TailCallRuntime(Runtime::kStringCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
  DCHECK_EQ(CompareICState::RECEIVER, state());
  Label miss;
  Condition either_smi = masm->CheckEitherSmi(rdx, rax);
  __ j(either_smi, &miss, Label::kNear);

  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
  __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
  __ j(below, &miss, Label::kNear);
  __ CmpObjectType(rdx, FIRST_JS_RECEIVER_TYPE, rcx);
  __ j(below, &miss, Label::kNear);

  DCHECK_EQ(equal, GetCondition());
  __ subp(rax, rdx);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) {
  Label miss;
  Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
  Condition either_smi = masm->CheckEitherSmi(rdx, rax);
  __ j(either_smi, &miss, Label::kNear);

  __ GetWeakValue(rdi, cell);
  __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rdi);
  __ j(not_equal, &miss, Label::kNear);
  __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rdi);
  __ j(not_equal, &miss, Label::kNear);

  if (Token::IsEqualityOp(op())) {
    __ subp(rax, rdx);
    __ ret(0);
  } else {
    __ PopReturnAddressTo(rcx);
    __ Push(rdx);
    __ Push(rax);
    __ Push(Smi::FromInt(NegativeComparisonResult(GetCondition())));
    __ PushReturnAddressFrom(rcx);
    __ TailCallRuntime(Runtime::kCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}
3182
3183
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003184void CompareICStub::GenerateMiss(MacroAssembler* masm) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003185 {
3186 // Call the runtime system in a fresh internal frame.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003187 FrameScope scope(masm, StackFrame::INTERNAL);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003188 __ Push(rdx);
3189 __ Push(rax);
3190 __ Push(rdx);
3191 __ Push(rax);
3192 __ Push(Smi::FromInt(op()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003193 __ CallRuntime(Runtime::kCompareIC_Miss);
Ben Murdochc7cc0282012-03-05 14:35:55 +00003194
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003195 // Compute the entry point of the rewritten stub.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003196 __ leap(rdi, FieldOperand(rax, Code::kHeaderSize));
3197 __ Pop(rax);
3198 __ Pop(rdx);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003199 }
Steve Block1e0659c2011-05-24 12:43:12 +01003200
Steve Block1e0659c2011-05-24 12:43:12 +01003201 // Do a tail call to the rewritten stub.
3202 __ jmp(rdi);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003203}
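
// Stack discipline sketch for the miss handler above (illustrative): the
// two operands are pushed twice so that one copy is consumed as runtime
// arguments while the other survives the call and is restored afterwards:
//
//   push rdx, rax         // saved copy, popped again below
//   push rdx, rax, op     // consumed by Runtime::kCompareIC_Miss
//   call kCompareIC_Miss  // returns the rewritten stub's code object in rax
//   pop rax, rdx          // original operands, handed to the new stub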


void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register properties,
                                                      Handle<Name> name,
                                                      Register r0) {
  DCHECK(name->IsUniqueName());
  // If the names of the slots in the range from 1 to kProbes - 1 for the
  // hash value are not equal to the name and the kProbes-th slot is not
  // used (its name is the undefined value), then the hash table cannot
  // contain the property. This holds even if some slots represent deleted
  // properties (their names are the hole value). See the probing sketch
  // after this function.
  for (int i = 0; i < kInlinedProbes; i++) {
    // r0 points to properties hash.
    // Compute the masked index: (hash + i + i * i) & mask.
    Register index = r0;
    // Capacity is smi 2^n.
    __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset));
    __ decl(index);
    __ andp(index,
            Immediate(name->Hash() + NameDictionary::GetProbeOffset(i)));

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ leap(index, Operand(index, index, times_2, 0));  // index *= 3.

    Register entity_name = r0;
    // Having undefined at this place means the name is not contained.
    STATIC_ASSERT(kSmiTagSize == 1);
    __ movp(entity_name, Operand(properties,
                                 index,
                                 times_pointer_size,
                                 kElementsStartOffset - kHeapObjectTag));
    __ Cmp(entity_name, masm->isolate()->factory()->undefined_value());
    __ j(equal, done);

    // Stop if we found the property.
    __ Cmp(entity_name, Handle<Name>(name));
    __ j(equal, miss);

    Label good;
    // Check for the hole and skip.
    __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex);
    __ j(equal, &good, Label::kNear);

    // Check if the entry name is not a unique name.
    __ movp(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
    __ JumpIfNotUniqueNameInstanceType(
        FieldOperand(entity_name, Map::kInstanceTypeOffset), miss);
    __ bind(&good);
  }

  NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
                                NEGATIVE_LOOKUP);
  __ Push(Handle<Object>(name));
  __ Push(Immediate(name->Hash()));
  __ CallStub(&stub);
  __ testp(r0, r0);
  __ j(not_zero, miss);
  __ jmp(done);
}
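
// A minimal sketch of the probe sequence inlined above, assuming the
// (hash + i + i * i) & mask scheme described in the comments and a
// power-of-two capacity; the real offsets come from
// NameDictionary::GetProbeOffset, so treat this as an illustration only:
//
//   uint32_t Probe(uint32_t hash, uint32_t capacity, int i) {
//     uint32_t mask = capacity - 1;      // capacity is 2^n
//     return (hash + i + i * i) & mask;  // quadratic probing
//   }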


// Probe the name dictionary in the |elements| register. Jump to the
// |done| label if a property with the given name is found, leaving the
// index into the dictionary in |r1|. Jump to the |miss| label otherwise.
void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register elements,
                                                      Register name,
                                                      Register r0,
                                                      Register r1) {
  DCHECK(!elements.is(r0));
  DCHECK(!elements.is(r1));
  DCHECK(!name.is(r0));
  DCHECK(!name.is(r1));

  __ AssertName(name);

  __ SmiToInteger32(r0, FieldOperand(elements, kCapacityOffset));
  __ decl(r0);

  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ movl(r1, FieldOperand(name, Name::kHashFieldOffset));
    __ shrl(r1, Immediate(Name::kHashShift));
    if (i > 0) {
      __ addl(r1, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ andp(r1, r0);

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ leap(r1, Operand(r1, r1, times_2, 0));  // r1 = r1 * 3

    // Check if the key is identical to the name.
    __ cmpp(name, Operand(elements, r1, times_pointer_size,
                          kElementsStartOffset - kHeapObjectTag));
    __ j(equal, done);
  }

  NameDictionaryLookupStub stub(masm->isolate(), elements, r0, r1,
                                POSITIVE_LOOKUP);
  __ Push(name);
  __ movl(r0, FieldOperand(name, Name::kHashFieldOffset));
  __ shrl(r0, Immediate(Name::kHashShift));
  __ Push(r0);
  __ CallStub(&stub);

  __ testp(r0, r0);
  __ j(zero, miss);
  __ jmp(done);
}


void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
  // This stub overrides SometimesSetsUpAFrame() to return false. That means
  // we cannot call anything that could cause a GC from this stub.
  // Stack frame on entry:
  //  rsp[0 * kPointerSize] : return address.
  //  rsp[1 * kPointerSize] : key's hash.
  //  rsp[2 * kPointerSize] : key.
  // Registers:
  //  dictionary_: NameDictionary to probe.
  //  result_: used as scratch.
  //  index_: will hold an index of entry if lookup is successful.
  //          might alias with result_.
  // Returns:
  //  result_ is zero if lookup failed, non-zero otherwise.

  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;

  Register scratch = result();

  __ SmiToInteger32(scratch, FieldOperand(dictionary(), kCapacityOffset));
  __ decl(scratch);
  __ Push(scratch);

  // If the names of the slots in the range from 1 to kProbes - 1 for the
  // hash value are not equal to the name and the kProbes-th slot is not
  // used (its name is the undefined value), then the hash table cannot
  // contain the property. This holds even if some slots represent deleted
  // properties (their names are the null value).
  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER,
                              kPointerSize);
  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ movp(scratch, args.GetArgumentOperand(1));
    if (i > 0) {
      __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ andp(scratch, Operand(rsp, 0));

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ leap(index(), Operand(scratch, scratch, times_2, 0));  // index *= 3.

    // Having undefined at this place means the name is not contained.
    __ movp(scratch, Operand(dictionary(), index(), times_pointer_size,
                             kElementsStartOffset - kHeapObjectTag));

    __ Cmp(scratch, isolate()->factory()->undefined_value());
    __ j(equal, &not_in_dictionary);

    // Stop if we found the property.
    __ cmpp(scratch, args.GetArgumentOperand(0));
    __ j(equal, &in_dictionary);

    if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
      // If we hit a key that is not a unique name during negative
      // lookup we have to bail out, as this key might be equal to the
      // key we are looking for.

      // Check if the entry name is not a unique name.
      __ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
      __ JumpIfNotUniqueNameInstanceType(
          FieldOperand(scratch, Map::kInstanceTypeOffset),
          &maybe_in_dictionary);
    }
  }

  __ bind(&maybe_in_dictionary);
  // If we are doing negative lookup then probing failure should be
  // treated as a lookup success. For positive lookup probing failure
  // should be treated as lookup failure.
  if (mode() == POSITIVE_LOOKUP) {
    __ movp(scratch, Immediate(0));
    __ Drop(1);
    __ ret(2 * kPointerSize);
  }

  __ bind(&in_dictionary);
  __ movp(scratch, Immediate(1));
  __ Drop(1);
  __ ret(2 * kPointerSize);

  __ bind(&not_in_dictionary);
  __ movp(scratch, Immediate(0));
  __ Drop(1);
  __ ret(2 * kPointerSize);
}


void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
    Isolate* isolate) {
  StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
  stub1.GetCode();
  StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
  stub2.GetCode();
}


// Takes the input in 3 registers: address_, value_ and object_. A pointer to
// the value has just been written into the object, and now this stub makes
// sure we keep the GC informed. The word in the object where the value has
// been written is in the address register.
void RecordWriteStub::Generate(MacroAssembler* masm) {
  Label skip_to_incremental_noncompacting;
  Label skip_to_incremental_compacting;

  // The first two instructions are generated with labels so as to get the
  // offset fixed up correctly by the bind(Label*) call. We patch them back
  // and forth between a compare instruction (a nop in this position) and the
  // real branch when we start and stop incremental heap marking.
  // See RecordWriteStub::Patch for details.
  __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
  __ jmp(&skip_to_incremental_compacting, Label::kFar);

  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&skip_to_incremental_noncompacting);
  GenerateIncremental(masm, INCREMENTAL);

  __ bind(&skip_to_incremental_compacting);
  GenerateIncremental(masm, INCREMENTAL_COMPACTION);

  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
  // Will be checked in IncrementalMarking::ActivateGeneratedStub.
  masm->set_byte_at(0, kTwoByteNopInstruction);
  masm->set_byte_at(2, kFiveByteNopInstruction);
}
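
// Patch-state sketch for the header emitted above (illustrative; see
// RecordWriteStub::Patch for the authoritative byte values):
//
//   STORE_BUFFER_ONLY:       [2-byte nop][5-byte nop]  -> fall through
//   INCREMENTAL:             [short jmp to the noncompacting handler]
//   INCREMENTAL_COMPACTION:  [2-byte nop][near jmp to the compacting handler]
//
// The two nops are really compare instructions whose only job is to occupy
// exactly the bytes that a short and a near jump would need.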


void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
  regs_.Save(masm);

  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    Label dont_need_remembered_set;

    __ movp(regs_.scratch0(), Operand(regs_.address(), 0));
    __ JumpIfNotInNewSpace(regs_.scratch0(),
                           regs_.scratch0(),
                           &dont_need_remembered_set);

    __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(),
                        &dont_need_remembered_set);

    // First notify the incremental marker if necessary, then update the
    // remembered set.
    CheckNeedsToInformIncrementalMarker(
        masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
    InformIncrementalMarker(masm);
    regs_.Restore(masm);
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);

    __ bind(&dont_need_remembered_set);
  }

  CheckNeedsToInformIncrementalMarker(
      masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
  InformIncrementalMarker(masm);
  regs_.Restore(masm);
  __ ret(0);
}


void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
  Register address =
      arg_reg_1.is(regs_.address()) ? kScratchRegister : regs_.address();
  DCHECK(!address.is(regs_.object()));
  DCHECK(!address.is(arg_reg_1));
  __ Move(address, regs_.address());
  __ Move(arg_reg_1, regs_.object());
  // TODO(gc) Can we just set address arg2 in the beginning?
  __ Move(arg_reg_2, address);
  __ LoadAddress(arg_reg_3,
                 ExternalReference::isolate_address(isolate()));
  int argument_count = 3;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count);
  __ CallCFunction(
      ExternalReference::incremental_marking_record_write_function(isolate()),
      argument_count);
  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
}
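
// Argument-marshalling sketch for the C call above (illustrative): the
// external reference resolves to the incremental marking record-write entry
// point. Its exact signature lives in the heap's incremental marking code;
// what the stub guarantees is only the register assignment:
//
//   arg_reg_1: the object that was written into
//   arg_reg_2: the slot address inside that object
//   arg_reg_3: the isolate, so the callee can find the heap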


void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    MacroAssembler* masm,
    OnNoNeedToInformIncrementalMarker on_no_need,
    Mode mode) {
  Label on_black;
  Label need_incremental;
  Label need_incremental_pop_object;

  __ movp(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
  __ andp(regs_.scratch0(), regs_.object());
  __ movp(regs_.scratch1(),
          Operand(regs_.scratch0(),
                  MemoryChunk::kWriteBarrierCounterOffset));
  __ subp(regs_.scratch1(), Immediate(1));
  __ movp(Operand(regs_.scratch0(),
                  MemoryChunk::kWriteBarrierCounterOffset),
          regs_.scratch1());
  __ j(negative, &need_incremental);

  // Let's look at the color of the object: if it is not black we don't have
  // to inform the incremental marker.
  __ JumpIfBlack(regs_.object(),
                 regs_.scratch0(),
                 regs_.scratch1(),
                 &on_black,
                 Label::kNear);

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&on_black);

  // Get the value from the slot.
  __ movp(regs_.scratch0(), Operand(regs_.address(), 0));

  if (mode == INCREMENTAL_COMPACTION) {
    Label ensure_not_white;

    __ CheckPageFlag(regs_.scratch0(),  // Contains value.
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kEvacuationCandidateMask,
                     zero,
                     &ensure_not_white,
                     Label::kNear);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kSkipEvacuationSlotsRecordingMask,
                     zero,
                     &need_incremental);

    __ bind(&ensure_not_white);
  }

  // We need an extra register for this, so we push the object register
  // temporarily.
  __ Push(regs_.object());
  __ JumpIfWhite(regs_.scratch0(),  // The value.
                 regs_.scratch1(),  // Scratch.
                 regs_.object(),    // Scratch.
                 &need_incremental_pop_object, Label::kNear);
  __ Pop(regs_.object());

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&need_incremental_pop_object);
  __ Pop(regs_.object());

  __ bind(&need_incremental);

  // Fall through when we need to inform the incremental marker.
}
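
// Fast-path decision sketch for the function above (illustrative
// pseudo-C++; the page-header field names follow MemoryChunk):
//
//   chunk = object & ~Page::kPageAlignmentMask;    // page header
//   if (--chunk->write_barrier_counter < 0) goto need_incremental;
//   if (!IsBlack(object)) return;                  // marker not interested
//   value = *slot;
//   if (IsWhite(value)) goto need_incremental;     // must grey the value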


void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(isolate(), 1, kSaveFPRegs);
  __ Call(ces.GetCode(), RelocInfo::CODE_TARGET);
  int parameter_count_offset =
      StubFailureTrampolineFrameConstants::kArgumentsLengthOffset;
  __ movp(rbx, MemOperand(rbp, parameter_count_offset));
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  __ PopReturnAddressTo(rcx);
  int additional_offset =
      function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0;
  __ leap(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset));
  __ jmp(rcx);  // Return to IC Miss stub, continuation still on stack.
}
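
// Stack fix-up sketch for the tail of the trampoline above (illustrative):
// with rbx holding the argument count and rcx the popped return address,
//
//   rsp += rbx * kPointerSize + additional_offset;  // drop the arguments
//   jmp rcx;                                        // resume in the caller
//
// additional_offset drops one extra slot when the stub ran in
// JS_FUNCTION_STUB_MODE.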


void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
  LoadICStub stub(isolate(), state());
  stub.GenerateForTrampoline(masm);
}


void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
  KeyedLoadICStub stub(isolate(), state());
  stub.GenerateForTrampoline(masm);
}


static void HandleArrayCases(MacroAssembler* masm, Register feedback,
                             Register receiver_map, Register scratch1,
                             Register scratch2, Register scratch3,
                             bool is_polymorphic, Label* miss) {
  // feedback initially contains the feedback array.
  Label next_loop, prepare_next;
  Label start_polymorphic;

  Register counter = scratch1;
  Register length = scratch2;
  Register cached_map = scratch3;

  __ movp(cached_map, FieldOperand(feedback, FixedArray::OffsetOfElementAt(0)));
  __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  __ j(not_equal, &start_polymorphic);

  // Found. Now call the handler.
  Register handler = feedback;
  __ movp(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1)));
  __ leap(handler, FieldOperand(handler, Code::kHeaderSize));
  __ jmp(handler);

  // Polymorphic: we have to loop from 2 to N (see the layout sketch after
  // this function).
  __ bind(&start_polymorphic);
  __ SmiToInteger32(length, FieldOperand(feedback, FixedArray::kLengthOffset));
  if (!is_polymorphic) {
    // If the IC could be monomorphic, we have to make sure we don't go past
    // the end of the feedback array.
    __ cmpl(length, Immediate(2));
    __ j(equal, miss);
  }
  __ movl(counter, Immediate(2));

  __ bind(&next_loop);
  __ movp(cached_map, FieldOperand(feedback, counter, times_pointer_size,
                                   FixedArray::kHeaderSize));
  __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  __ j(not_equal, &prepare_next);
  __ movp(handler, FieldOperand(feedback, counter, times_pointer_size,
                                FixedArray::kHeaderSize + kPointerSize));
  __ leap(handler, FieldOperand(handler, Code::kHeaderSize));
  __ jmp(handler);

  __ bind(&prepare_next);
  __ addl(counter, Immediate(2));
  __ cmpl(counter, length);
  __ j(less, &next_loop);

  // We exhausted our array of map handler pairs.
  __ jmp(miss);
}
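
// Layout sketch of the polymorphic feedback array walked above (pairs with
// stride 2; illustrative):
//
//   feedback[0]: WeakCell(map_0)    feedback[1]: handler_0
//   feedback[2]: WeakCell(map_1)    feedback[3]: handler_1
//   ...
//
// The loop starts at index 2 because the first pair was already checked by
// the fast path at the top of the function.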


static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver,
                                  Register receiver_map, Register feedback,
                                  Register vector, Register integer_slot,
                                  Label* compare_map, Label* load_smi_map,
                                  Label* try_array) {
  __ JumpIfSmi(receiver, load_smi_map);
  __ movp(receiver_map, FieldOperand(receiver, 0));

  __ bind(compare_map);
  __ cmpp(receiver_map, FieldOperand(feedback, WeakCell::kValueOffset));
  __ j(not_equal, try_array);
  Register handler = feedback;
  __ movp(handler, FieldOperand(vector, integer_slot, times_pointer_size,
                                FixedArray::kHeaderSize + kPointerSize));
  __ leap(handler, FieldOperand(handler, Code::kHeaderSize));
  __ jmp(handler);
}
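
// Monomorphic feedback layout assumed above (illustrative):
//
//   vector[slot]     : WeakCell holding the expected receiver map
//   vector[slot + 1] : handler code object, entered via its header offset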


void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }


void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}


void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // rdx
  Register name = LoadWithVectorDescriptor::NameRegister();          // rcx
  Register vector = LoadWithVectorDescriptor::VectorRegister();      // rbx
  Register slot = LoadWithVectorDescriptor::SlotRegister();          // rax
  Register feedback = rdi;
  Register integer_slot = r8;
  Register receiver_map = r9;

  __ SmiToInteger32(integer_slot, slot);
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector,
                        integer_slot, &compare_map, &load_smi_map, &try_array);

  // Is it a fixed array?
  __ bind(&try_array);
  __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &not_array);
  HandleArrayCases(masm, feedback, receiver_map, integer_slot, r11, r15, true,
                   &miss);

  __ bind(&not_array);
  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
  __ j(not_equal, &miss);
  Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags(
      Code::ComputeHandlerFlags(Code::LOAD_IC));
  masm->isolate()->stub_cache()->GenerateProbe(
      masm, Code::LOAD_IC, code_flags, receiver, name, feedback, no_reg);

  __ bind(&miss);
  LoadIC::GenerateMiss(masm);

  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}


void KeyedLoadICStub::Generate(MacroAssembler* masm) {
  GenerateImpl(masm, false);
}


void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}


void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // rdx
  Register key = LoadWithVectorDescriptor::NameRegister();           // rcx
  Register vector = LoadWithVectorDescriptor::VectorRegister();      // rbx
  Register slot = LoadWithVectorDescriptor::SlotRegister();          // rax
  Register feedback = rdi;
  Register integer_slot = r8;
  Register receiver_map = r9;

  __ SmiToInteger32(integer_slot, slot);
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector,
                        integer_slot, &compare_map, &load_smi_map, &try_array);

  __ bind(&try_array);
  // Is it a fixed array?
  __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &not_array);

  // We have a polymorphic element handler.
  Label polymorphic, try_poly_name;
  __ bind(&polymorphic);
  HandleArrayCases(masm, feedback, receiver_map, integer_slot, r11, r15, true,
                   &miss);

  __ bind(&not_array);
  // Is it generic?
  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
  __ j(not_equal, &try_poly_name);
  Handle<Code> megamorphic_stub =
      KeyedLoadIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
  __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);

  __ bind(&try_poly_name);
  // We might have a name in feedback, and a fixed array in the next slot.
  __ cmpp(key, feedback);
  __ j(not_equal, &miss);
  // If the name comparison succeeded, we know we have a fixed array with
  // at least one map/handler pair.
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize + kPointerSize));
  HandleArrayCases(masm, feedback, receiver_map, integer_slot, r11, r15, false,
                   &miss);

  __ bind(&miss);
  KeyedLoadIC::GenerateMiss(masm);

  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}


void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
  VectorStoreICStub stub(isolate(), state());
  stub.GenerateForTrampoline(masm);
}


void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
  VectorKeyedStoreICStub stub(isolate(), state());
  stub.GenerateForTrampoline(masm);
}


void VectorStoreICStub::Generate(MacroAssembler* masm) {
  GenerateImpl(masm, false);
}


void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}


void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = VectorStoreICDescriptor::ReceiverRegister();  // rdx
  Register key = VectorStoreICDescriptor::NameRegister();           // rcx
  Register vector = VectorStoreICDescriptor::VectorRegister();      // rbx
  Register slot = VectorStoreICDescriptor::SlotRegister();          // rdi
  DCHECK(VectorStoreICDescriptor::ValueRegister().is(rax));         // rax
  Register feedback = r8;
  Register integer_slot = r9;
  Register receiver_map = r11;
  DCHECK(!AreAliased(feedback, integer_slot, vector, slot, receiver_map));

  __ SmiToInteger32(integer_slot, slot);
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector,
                        integer_slot, &compare_map, &load_smi_map, &try_array);

  // Is it a fixed array?
  __ bind(&try_array);
  __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &not_array);
  HandleArrayCases(masm, feedback, receiver_map, integer_slot, r14, r15, true,
                   &miss);

  __ bind(&not_array);
  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
  __ j(not_equal, &miss);

  Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags(
      Code::ComputeHandlerFlags(Code::STORE_IC));
  masm->isolate()->stub_cache()->GenerateProbe(masm, Code::STORE_IC, code_flags,
                                               receiver, key, feedback, no_reg);

  __ bind(&miss);
  StoreIC::GenerateMiss(masm);

  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}


void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) {
  GenerateImpl(masm, false);
}


void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}


static void HandlePolymorphicKeyedStoreCase(MacroAssembler* masm,
                                            Register receiver_map,
                                            Register feedback, Register scratch,
                                            Register scratch1,
                                            Register scratch2, Label* miss) {
  // feedback initially contains the feedback array.
  Label next, next_loop, prepare_next;
  Label transition_call;

  Register cached_map = scratch;
  Register counter = scratch1;
  Register length = scratch2;

  // Polymorphic: we have to loop from 0 to N - 1 over triples of
  // (map, transition map, handler); see the layout sketch after this
  // function.
  __ movp(counter, Immediate(0));
  __ movp(length, FieldOperand(feedback, FixedArray::kLengthOffset));
  __ SmiToInteger32(length, length);

  __ bind(&next_loop);
  __ movp(cached_map, FieldOperand(feedback, counter, times_pointer_size,
                                   FixedArray::kHeaderSize));
  __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  __ j(not_equal, &prepare_next);
  __ movp(cached_map, FieldOperand(feedback, counter, times_pointer_size,
                                   FixedArray::kHeaderSize + kPointerSize));
  __ CompareRoot(cached_map, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, &transition_call);
  __ movp(feedback, FieldOperand(feedback, counter, times_pointer_size,
                                 FixedArray::kHeaderSize + 2 * kPointerSize));
  __ leap(feedback, FieldOperand(feedback, Code::kHeaderSize));
  __ jmp(feedback);

  __ bind(&transition_call);
  DCHECK(receiver_map.is(VectorStoreTransitionDescriptor::MapRegister()));
  __ movp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  // The weak cell may have been cleared.
  __ JumpIfSmi(receiver_map, miss);
  // Get the handler in value.
  __ movp(feedback, FieldOperand(feedback, counter, times_pointer_size,
                                 FixedArray::kHeaderSize + 2 * kPointerSize));
  __ leap(feedback, FieldOperand(feedback, Code::kHeaderSize));
  __ jmp(feedback);

  __ bind(&prepare_next);
  __ addl(counter, Immediate(3));
  __ cmpl(counter, length);
  __ j(less, &next_loop);

  // We exhausted our array of map handler pairs.
  __ jmp(miss);
}
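
// Layout sketch of the keyed-store feedback array walked above (triples
// with stride 3; illustrative):
//
//   feedback[3k]     : WeakCell(map_k)
//   feedback[3k + 1] : WeakCell(transition map) or undefined
//   feedback[3k + 2] : handler code object
//
// When the middle slot is undefined the handler is entered directly;
// otherwise the transition map is loaded first so the handler can perform
// the map transition as part of the store.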


void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = VectorStoreICDescriptor::ReceiverRegister();  // rdx
  Register key = VectorStoreICDescriptor::NameRegister();           // rcx
  Register vector = VectorStoreICDescriptor::VectorRegister();      // rbx
  Register slot = VectorStoreICDescriptor::SlotRegister();          // rdi
  DCHECK(VectorStoreICDescriptor::ValueRegister().is(rax));         // rax
  Register feedback = r8;
  Register integer_slot = r9;
  Register receiver_map = r11;
  DCHECK(!AreAliased(feedback, integer_slot, vector, slot, receiver_map));

  __ SmiToInteger32(integer_slot, slot);
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector,
                        integer_slot, &compare_map, &load_smi_map, &try_array);

  // Is it a fixed array?
  __ bind(&try_array);
  __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &not_array);
  HandlePolymorphicKeyedStoreCase(masm, receiver_map, feedback, integer_slot,
                                  r15, r14, &miss);

  __ bind(&not_array);
  Label try_poly_name;
  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
  __ j(not_equal, &try_poly_name);

  Handle<Code> megamorphic_stub =
      KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
  __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);

  __ bind(&try_poly_name);
  // We might have a name in feedback, and a fixed array in the next slot.
  __ cmpp(key, feedback);
  __ j(not_equal, &miss);
  // If the name comparison succeeded, we know we have a fixed array with
  // at least one map/handler pair.
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize + kPointerSize));
  HandleArrayCases(masm, feedback, receiver_map, integer_slot, r14, r15, false,
                   &miss);

  __ bind(&miss);
  KeyedStoreIC::GenerateMiss(masm);

  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}


void CallICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(rbx);
  CallICStub stub(isolate(), state());
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
}


void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
  if (masm->isolate()->function_entry_hook() != NULL) {
    ProfileEntryHookStub stub(masm->isolate());
    masm->CallStub(&stub);
  }
}


void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  // This stub can be called from essentially anywhere, so it needs to save
  // all volatile and callee-save registers.
  const size_t kNumSavedRegisters = 2;
  __ pushq(arg_reg_1);
  __ pushq(arg_reg_2);

  // Calculate the original stack pointer and store it in the second arg.
  __ leap(arg_reg_2,
          Operand(rsp, kNumSavedRegisters * kRegisterSize + kPCOnStackSize));

  // Calculate the function address and store it in the first arg.
  __ movp(arg_reg_1, Operand(rsp, kNumSavedRegisters * kRegisterSize));
  __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));

  // Save the remainder of the volatile registers.
  masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);

  // Call the entry hook function.
  __ Move(rax, FUNCTION_ADDR(isolate()->function_entry_hook()),
          Assembler::RelocInfoNone());

  AllowExternalCallThatCantCauseGC scope(masm);

  const int kArgumentCount = 2;
  __ PrepareCallCFunction(kArgumentCount);
  __ CallCFunction(rax, kArgumentCount);

  // Restore volatile regs.
  masm->PopCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);
  __ popq(arg_reg_2);
  __ popq(arg_reg_1);

  __ Ret();
}
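
// The hook invoked above is the embedder's v8::FunctionEntryHook. To a
// first approximation its shape (from include/v8.h) is:
//
//   typedef void (*FunctionEntryHook)(uintptr_t function,
//                                     uintptr_t return_addr_location);
//
// so arg_reg_1 carries the address of the function being entered and
// arg_reg_2 the stack location that holds the return address.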


template<class T>
static void CreateArrayDispatch(MacroAssembler* masm,
                                AllocationSiteOverrideMode mode) {
  if (mode == DISABLE_ALLOCATION_SITES) {
    T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmpl(rdx, Immediate(kind));
      __ j(not_equal, &next);
      T stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}


static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
                                           AllocationSiteOverrideMode mode) {
  // rbx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
  // rdx - kind (if mode != DISABLE_ALLOCATION_SITES)
  // rax - number of arguments
  // rdi - constructor?
  // rsp[0] - return address
  // rsp[8] - last argument
  Handle<Object> undefined_sentinel(
      masm->isolate()->heap()->undefined_value(),
      masm->isolate());

  Label normal_sequence;
  if (mode == DONT_OVERRIDE) {
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
    STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4);
    STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);

    // Is the low bit set? If so, we are holey and that is good.
    __ testb(rdx, Immediate(1));
    __ j(not_zero, &normal_sequence);
  }

  // Look at the first argument.
  StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rcx, args.GetArgumentOperand(0));
  __ testp(rcx, rcx);
  __ j(zero, &normal_sequence);

  if (mode == DISABLE_ALLOCATION_SITES) {
    ElementsKind initial = GetInitialFastElementsKind();
    ElementsKind holey_initial = GetHoleyElementsKind(initial);

    ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
                                                  holey_initial,
                                                  DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub_holey);

    __ bind(&normal_sequence);
    ArraySingleArgumentConstructorStub stub(masm->isolate(),
                                            initial,
                                            DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    // We are going to create a holey array, but our kind is non-holey.
    // Fix kind and retry (only if we have an allocation site in the slot).
    // The increment turns a packed kind into its holey twin; see the sketch
    // after this function.
    __ incl(rdx);

    if (FLAG_debug_code) {
      Handle<Map> allocation_site_map =
          masm->isolate()->factory()->allocation_site_map();
      __ Cmp(FieldOperand(rbx, 0), allocation_site_map);
      __ Assert(equal, kExpectedAllocationSite);
    }

    // Save the resulting elements kind in type info. We can't just store rdx
    // in the AllocationSite::transition_info field because the elements kind
    // is restricted to a portion of the field; the upper bits need to be
    // left alone.
    STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
    __ SmiAddConstant(FieldOperand(rbx, AllocationSite::kTransitionInfoOffset),
                      Smi::FromInt(kFastElementsKindPackedToHoley));

    __ bind(&normal_sequence);
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmpl(rdx, Immediate(kind));
      __ j(not_equal, &next);
      ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}
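
// Packed-to-holey sketch for the incl(rdx) above, relying on the kind
// numbering asserted at the top of the function: packed kinds are even and
// each holey twin is the next odd value, so adding one converts packed to
// holey (illustrative):
//
//   ElementsKind ToHoley(ElementsKind packed_kind) {
//     return static_cast<ElementsKind>(packed_kind + 1);  // e.g. 0 -> 1
//   }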


template<class T>
static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
  int to_index = GetSequenceIndexFromFastElementsKind(
      TERMINAL_FAST_ELEMENTS_KIND);
  for (int i = 0; i <= to_index; ++i) {
    ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
    T stub(isolate, kind);
    stub.GetCode();
    if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
      T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
      stub1.GetCode();
    }
  }
}


void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
  ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
      isolate);
}


void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
    Isolate* isolate) {
  ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
  for (int i = 0; i < 2; i++) {
    // For internal arrays we only need a few things.
    InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
    stubh1.GetCode();
    InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
    stubh2.GetCode();
    InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
    stubh3.GetCode();
  }
}


void ArrayConstructorStub::GenerateDispatchToArrayStub(
    MacroAssembler* masm,
    AllocationSiteOverrideMode mode) {
  if (argument_count() == ANY) {
    Label not_zero_case, not_one_case;
    __ testp(rax, rax);
    __ j(not_zero, &not_zero_case);
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);

    __ bind(&not_zero_case);
    __ cmpl(rax, Immediate(1));
    __ j(greater, &not_one_case);
    CreateArrayDispatchOneArgument(masm, mode);

    __ bind(&not_one_case);
    CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
  } else if (argument_count() == NONE) {
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
  } else if (argument_count() == ONE) {
    CreateArrayDispatchOneArgument(masm, mode);
  } else if (argument_count() == MORE_THAN_ONE) {
    CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
  } else {
    UNREACHABLE();
  }
}


void ArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rbx    : AllocationSite or undefined
  //  -- rdi    : constructor
  //  -- rdx    : new target
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions, which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rcx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);

    // We should either have undefined in rbx or a valid AllocationSite.
    __ AssertUndefinedOrAllocationSite(rbx);
  }

  // Enter the context of the Array function.
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  Label subclassing;
  __ cmpp(rdi, rdx);
  __ j(not_equal, &subclassing);

  Label no_info;
  // If the feedback vector is the undefined value call an array constructor
  // that doesn't use AllocationSites.
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(equal, &no_info);

  // Only look at the lower 16 bits of the transition info (see the decoding
  // sketch after this function).
  __ movp(rdx, FieldOperand(rbx, AllocationSite::kTransitionInfoOffset));
  __ SmiToInteger32(rdx, rdx);
  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
  __ andp(rdx, Immediate(AllocationSite::ElementsKindBits::kMask));
  GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);

  __ bind(&no_info);
  GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);

  // Subclassing.
  __ bind(&subclassing);
  switch (argument_count()) {
    case ANY:
    case MORE_THAN_ONE: {
      StackArgumentsAccessor args(rsp, rax);
      __ movp(args.GetReceiverOperand(), rdi);
      __ addp(rax, Immediate(3));
      break;
    }
    case NONE: {
      StackArgumentsAccessor args(rsp, 0);
      __ movp(args.GetReceiverOperand(), rdi);
      __ Set(rax, 3);
      break;
    }
    case ONE: {
      StackArgumentsAccessor args(rsp, 1);
      __ movp(args.GetReceiverOperand(), rdi);
      __ Set(rax, 4);
      break;
    }
  }
  __ PopReturnAddressTo(rcx);
  __ Push(rdx);
  __ Push(rbx);
  __ PushReturnAddressFrom(rcx);
  __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
}
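
// Decoding sketch for the AllocationSite transition info masked above
// (illustrative; the bit-field layout comes from
// AllocationSite::ElementsKindBits):
//
//   int info = Smi::cast(site->transition_info())->value();
//   ElementsKind kind = static_cast<ElementsKind>(
//       info & AllocationSite::ElementsKindBits::kMask);
//
// Only the low bits encode the elements kind; the upper bits carry other
// AllocationSite state and must be preserved on update.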


void InternalArrayConstructorStub::GenerateCase(
    MacroAssembler* masm, ElementsKind kind) {
  Label not_zero_case, not_one_case;
  Label normal_sequence;

  __ testp(rax, rax);
  __ j(not_zero, &not_zero_case);
  InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
  __ TailCallStub(&stub0);

  __ bind(&not_zero_case);
  __ cmpl(rax, Immediate(1));
  __ j(greater, &not_one_case);

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array: look at the first argument.
    StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movp(rcx, args.GetArgumentOperand(0));
    __ testp(rcx, rcx);
    __ j(zero, &normal_sequence);

    InternalArraySingleArgumentConstructorStub
        stub1_holey(isolate(), GetHoleyElementsKind(kind));
    __ TailCallStub(&stub1_holey);
  }

  __ bind(&normal_sequence);
  InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
  __ TailCallStub(&stub1);

  __ bind(&not_one_case);
  InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
  __ TailCallStub(&stubN);
}


void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rdi    : constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions, which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rcx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));

  // Load the map's "bit field 2" into rcx. We only need the first byte,
  // but the following masking takes care of that anyway.
  __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ DecodeField<Map::ElementsKindBits>(rcx);

  if (FLAG_debug_code) {
    Label done;
    __ cmpl(rcx, Immediate(FAST_ELEMENTS));
    __ j(equal, &done);
    __ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS));
    __ Assert(equal,
              kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  Label fast_elements_case;
  __ cmpl(rcx, Immediate(FAST_ELEMENTS));
  __ j(equal, &fast_elements_case);
  GenerateCase(masm, FAST_HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateCase(masm, FAST_ELEMENTS);
}


void FastNewObjectStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdi    : target
  //  -- rdx    : new target
  //  -- rsi    : context
  //  -- rsp[0] : return address
  // -----------------------------------
  __ AssertFunction(rdi);
  __ AssertReceiver(rdx);

  // Verify that the new target is a JSFunction.
  Label new_object;
  __ CmpObjectType(rdx, JS_FUNCTION_TYPE, rbx);
  __ j(not_equal, &new_object);

  // Load the initial map and verify that it's in fact a map.
  __ movp(rcx, FieldOperand(rdx, JSFunction::kPrototypeOrInitialMapOffset));
  __ JumpIfSmi(rcx, &new_object);
  __ CmpObjectType(rcx, MAP_TYPE, rbx);
  __ j(not_equal, &new_object);

  // Fall back to runtime if the target differs from the new target's
  // initial map constructor.
  __ cmpp(rdi, FieldOperand(rcx, Map::kConstructorOrBackPointerOffset));
  __ j(not_equal, &new_object);

  // Allocate the JSObject on the heap.
  Label allocate, done_allocate;
  __ movzxbl(rbx, FieldOperand(rcx, Map::kInstanceSizeOffset));
  __ leal(rbx, Operand(rbx, times_pointer_size, 0));
  __ Allocate(rbx, rax, rdi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
  __ bind(&done_allocate);

  // Initialize the JSObject fields.
  __ movp(Operand(rax, JSObject::kMapOffset), rcx);
  __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
  __ movp(Operand(rax, JSObject::kPropertiesOffset), rbx);
  __ movp(Operand(rax, JSObject::kElementsOffset), rbx);
  STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
  __ leap(rbx, Operand(rax, JSObject::kHeaderSize));

  // ----------- S t a t e -------------
  //  -- rax    : result (untagged)
  //  -- rbx    : result fields (untagged)
  //  -- rdi    : result end (untagged)
  //  -- rcx    : initial map
  //  -- rsi    : context
  //  -- rsp[0] : return address
  // -----------------------------------

  // Perform in-object slack tracking if requested.
  Label slack_tracking;
  STATIC_ASSERT(Map::kNoSlackTracking == 0);
  __ LoadRoot(r11, Heap::kUndefinedValueRootIndex);
  __ testl(FieldOperand(rcx, Map::kBitField3Offset),
           Immediate(Map::ConstructionCounter::kMask));
  __ j(not_zero, &slack_tracking, Label::kNear);
  {
    // Initialize all in-object fields with undefined.
    __ InitializeFieldsWithFiller(rbx, rdi, r11);

    // Add the object tag to make the JSObject real.
    STATIC_ASSERT(kHeapObjectTag == 1);
    __ incp(rax);
    __ Ret();
  }
  __ bind(&slack_tracking);
  {
    // Decrease generous allocation count.
    STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
    __ subl(FieldOperand(rcx, Map::kBitField3Offset),
            Immediate(1 << Map::ConstructionCounter::kShift));

    // Initialize the in-object fields with undefined.
    __ movzxbl(rdx, FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset));
    __ negp(rdx);
    __ leap(rdx, Operand(rdi, rdx, times_pointer_size, 0));
    __ InitializeFieldsWithFiller(rbx, rdx, r11);

    // Initialize the remaining (reserved) fields with one pointer filler map.
    __ LoadRoot(r11, Heap::kOnePointerFillerMapRootIndex);
    __ InitializeFieldsWithFiller(rdx, rdi, r11);

    // Add the object tag to make the JSObject real.
    STATIC_ASSERT(kHeapObjectTag == 1);
    __ incp(rax);

    // Check if we can finalize the instance size.
    Label finalize;
    STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
    __ testl(FieldOperand(rcx, Map::kBitField3Offset),
             Immediate(Map::ConstructionCounter::kMask));
    __ j(zero, &finalize, Label::kNear);
    __ Ret();

    // Finalize the instance size.
    __ bind(&finalize);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(rax);
      __ Push(rcx);
      __ CallRuntime(Runtime::kFinalizeInstanceSize);
      __ Pop(rax);
    }
    __ Ret();
  }

  // Fall back to %AllocateInNewSpace.
  __ bind(&allocate);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Integer32ToSmi(rbx, rbx);
    __ Push(rcx);
    __ Push(rbx);
    __ CallRuntime(Runtime::kAllocateInNewSpace);
    __ Pop(rcx);
  }
  STATIC_ASSERT(kHeapObjectTag == 1);
  __ decp(rax);
  __ movzxbl(rbx, FieldOperand(rcx, Map::kInstanceSizeOffset));
  __ leap(rdi, Operand(rax, rbx, times_pointer_size, 0));
  __ jmp(&done_allocate);

  // Fall back to %NewObject.
  __ bind(&new_object);
  __ PopReturnAddressTo(rcx);
  __ Push(rdi);
  __ Push(rdx);
  __ PushReturnAddressFrom(rcx);
  __ TailCallRuntime(Runtime::kNewObject);
}
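
// In-object slack tracking sketch for the stub above (illustrative
// pseudo-C++): the map's ConstructionCounter bit field counts down across
// allocations, and unused in-object slots are temporarily filled with
// one-pointer filler maps so the instance size can later be shrunk:
//
//   if (map->construction_counter() == Map::kNoSlackTracking) {
//     fill(fields_start, object_end, undefined);
//   } else {
//     map->decrement_construction_counter();
//     fill(fields_start, used_end, undefined);
//     fill(used_end, object_end, one_pointer_filler_map);
//     if (map->construction_counter() == 0)  // counter exhausted
//       Runtime::kFinalizeInstanceSize(map);
//   }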
4549
4550
void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdi    : function
  //  -- rsi    : context
  //  -- rbp    : frame pointer
  //  -- rsp[0] : return address
  // -----------------------------------
  __ AssertFunction(rdi);

  // For Ignition we need to skip all possible handler/stub frames until
  // we reach the JavaScript frame for the function (similar to what the
  // runtime fallback implementation does). So make rdx point to that
  // JavaScript frame.
  {
    Label loop, loop_entry;
    __ movp(rdx, rbp);
    __ jmp(&loop_entry, Label::kNear);
    __ bind(&loop);
    __ movp(rdx, Operand(rdx, StandardFrameConstants::kCallerFPOffset));
    __ bind(&loop_entry);
    __ cmpp(rdi, Operand(rdx, StandardFrameConstants::kFunctionOffset));
    __ j(not_equal, &loop);
  }

  // Check if we have rest parameters (only possible if we have an
  // arguments adaptor frame below the function frame).
  Label no_rest_parameters;
  __ movp(rbx, Operand(rdx, StandardFrameConstants::kCallerFPOffset));
  __ Cmp(Operand(rbx, CommonFrameConstants::kContextOrFrameTypeOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(not_equal, &no_rest_parameters, Label::kNear);

  // Check if the arguments adaptor frame contains more arguments than
  // specified by the function's internal formal parameter count.
  Label rest_parameters;
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ LoadSharedFunctionInfoSpecialField(
      rcx, rcx, SharedFunctionInfo::kFormalParameterCountOffset);
  __ SmiToInteger32(
      rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ subl(rax, rcx);
  __ j(greater, &rest_parameters);

  // Return an empty rest parameter array.
  __ bind(&no_rest_parameters);
  {
    // ----------- S t a t e -------------
    //  -- rsi    : context
    //  -- rsp[0] : return address
    // -----------------------------------

    // Allocate an empty rest parameter array.
    Label allocate, done_allocate;
    __ Allocate(JSArray::kSize, rax, rdx, rcx, &allocate, TAG_OBJECT);
    __ bind(&done_allocate);

    // Set up the rest parameter array in rax.
    __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, rcx);
    __ movp(FieldOperand(rax, JSArray::kMapOffset), rcx);
    __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
    __ movp(FieldOperand(rax, JSArray::kPropertiesOffset), rcx);
    __ movp(FieldOperand(rax, JSArray::kElementsOffset), rcx);
    __ movp(FieldOperand(rax, JSArray::kLengthOffset), Immediate(0));
    STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
    __ Ret();

    // Fall back to %AllocateInNewSpace.
    __ bind(&allocate);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(Smi::FromInt(JSArray::kSize));
      __ CallRuntime(Runtime::kAllocateInNewSpace);
    }
    __ jmp(&done_allocate);
  }

  __ bind(&rest_parameters);
  {
    // Compute the pointer to the first rest parameter (skipping the
    // receiver).
    __ leap(rbx, Operand(rbx, rax, times_pointer_size,
                         StandardFrameConstants::kCallerSPOffset -
                             1 * kPointerSize));

    // ----------- S t a t e -------------
    //  -- rsi    : context
    //  -- rax    : number of rest parameters
    //  -- rbx    : pointer to the first rest parameter
    //  -- rsp[0] : return address
    // -----------------------------------

    // Allocate space for the rest parameter array plus the backing store.
    Label allocate, done_allocate;
    __ leal(rcx, Operand(rax, times_pointer_size,
                         JSArray::kSize + FixedArray::kHeaderSize));
    __ Allocate(rcx, rdx, rdi, no_reg, &allocate, TAG_OBJECT);
    __ bind(&done_allocate);

    // Compute the arguments.length in rdi.
    __ Integer32ToSmi(rdi, rax);

    // Set up the elements array in rdx.
    __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
    __ movp(FieldOperand(rdx, FixedArray::kMapOffset), rcx);
    __ movp(FieldOperand(rdx, FixedArray::kLengthOffset), rdi);
    {
      Label loop, done_loop;
      __ Set(rcx, 0);
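      // Copy the rest parameters: rbx walks down the stack arguments while
      // rcx counts up through the FixedArray elements.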
      __ bind(&loop);
      __ cmpl(rcx, rax);
      __ j(equal, &done_loop, Label::kNear);
      __ movp(kScratchRegister, Operand(rbx, 0 * kPointerSize));
      __ movp(
          FieldOperand(rdx, rcx, times_pointer_size, FixedArray::kHeaderSize),
          kScratchRegister);
      __ subp(rbx, Immediate(1 * kPointerSize));
      __ addl(rcx, Immediate(1));
      __ jmp(&loop);
      __ bind(&done_loop);
    }

    // Set up the rest parameter array in rax.
    __ leap(rax,
            Operand(rdx, rax, times_pointer_size, FixedArray::kHeaderSize));
    __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, rcx);
    __ movp(FieldOperand(rax, JSArray::kMapOffset), rcx);
    __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
    __ movp(FieldOperand(rax, JSArray::kPropertiesOffset), rcx);
    __ movp(FieldOperand(rax, JSArray::kElementsOffset), rdx);
    __ movp(FieldOperand(rax, JSArray::kLengthOffset), rdi);
    STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
    __ Ret();

    // Fall back to %AllocateInNewSpace.
    __ bind(&allocate);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Integer32ToSmi(rax, rax);
      __ Integer32ToSmi(rcx, rcx);
      __ Push(rax);
      __ Push(rbx);
      __ Push(rcx);
      __ CallRuntime(Runtime::kAllocateInNewSpace);
      __ movp(rdx, rax);
      __ Pop(rbx);
      __ Pop(rax);
      __ SmiToInteger32(rax, rax);
    }
    __ jmp(&done_allocate);
  }
}


void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdi    : function
  //  -- rsi    : context
  //  -- rbp    : frame pointer
  //  -- rsp[0] : return address
  // -----------------------------------
  __ AssertFunction(rdi);

  // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ LoadSharedFunctionInfoSpecialField(
      rcx, rcx, SharedFunctionInfo::kFormalParameterCountOffset);
  __ leap(rdx, Operand(rbp, rcx, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));
  __ Integer32ToSmi(rcx, rcx);

  // rcx    : number of parameters (tagged)
  // rdx    : parameters pointer
  // rdi    : function
  // rsp[0] : return address
  // Registers used over the whole function:
  //  rbx : the mapped parameter count (untagged)
  //  rax : the allocated object (tagged)
  Factory* factory = isolate()->factory();

  __ SmiToInteger64(rbx, rcx);
  // rbx = parameter count (untagged)

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ movp(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ movp(r8, Operand(rax, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ Cmp(r8, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor_frame);

  // No adaptor frame: the parameter count equals the argument count.
  __ movp(r11, rbx);
  __ jmp(&try_allocate, Label::kNear);

  // We have an adaptor frame. Patch the parameters pointer.
  __ bind(&adaptor_frame);
  __ SmiToInteger64(
      r11, Operand(rax, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ leap(rdx, Operand(rax, r11, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));

  // rbx = parameter count (untagged)
  // r11 = argument count (untagged)
  // Compute the mapped parameter count = min(rbx, r11) in rbx.
  __ cmpp(rbx, r11);
  __ j(less_equal, &try_allocate, Label::kNear);
  __ movp(rbx, r11);

  __ bind(&try_allocate);

  // Compute the sizes of backing store, parameter map, and arguments object.
  // 1. The parameter map has 2 extra words for the context and the backing
  //    store.
  const int kParameterMapHeaderSize =
      FixedArray::kHeaderSize + 2 * kPointerSize;
  Label no_parameter_map;
  __ xorp(r8, r8);
  __ testp(rbx, rbx);
  __ j(zero, &no_parameter_map, Label::kNear);
  __ leap(r8, Operand(rbx, times_pointer_size, kParameterMapHeaderSize));
  __ bind(&no_parameter_map);

  // 2. Backing store.
  __ leap(r8, Operand(r8, r11, times_pointer_size, FixedArray::kHeaderSize));

  // 3. Arguments object.
  __ addp(r8, Immediate(JSSloppyArgumentsObject::kSize));

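  // At this point r8 holds the total allocation size in bytes:
  //   (mapped count > 0 ? kParameterMapHeaderSize + mapped * kPointerSize : 0)
  //   + FixedArray::kHeaderSize + argc * kPointerSize
  //   + JSSloppyArgumentsObject::kSize.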
  // Do the allocation of all three objects in one go.
  __ Allocate(r8, rax, r9, no_reg, &runtime, TAG_OBJECT);

  // rax = address of new object(s) (tagged)
  // r11 = argument count (untagged)
  // Get the arguments map from the current native context into r9.
  Label has_mapped_parameters, instantiate;
  __ movp(r9, NativeContextOperand());
  __ testp(rbx, rbx);
  __ j(not_zero, &has_mapped_parameters, Label::kNear);

  const int kIndex = Context::SLOPPY_ARGUMENTS_MAP_INDEX;
  __ movp(r9, Operand(r9, Context::SlotOffset(kIndex)));
  __ jmp(&instantiate, Label::kNear);

  const int kAliasedIndex = Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX;
  __ bind(&has_mapped_parameters);
  __ movp(r9, Operand(r9, Context::SlotOffset(kAliasedIndex)));
  __ bind(&instantiate);

  // rax = address of new object (tagged)
  // rbx = mapped parameter count (untagged)
  // r11 = argument count (untagged)
  // r9  = address of arguments map (tagged)
  __ movp(FieldOperand(rax, JSObject::kMapOffset), r9);
  __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister);
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), kScratchRegister);

  // Set up the callee in-object property.
  __ AssertNotSmi(rdi);
  __ movp(FieldOperand(rax, JSSloppyArgumentsObject::kCalleeOffset), rdi);

  // Use the length (smi tagged) and set that as an in-object property too.
  // Note: r11 is tagged from here on.
  __ Integer32ToSmi(r11, r11);
  __ movp(FieldOperand(rax, JSSloppyArgumentsObject::kLengthOffset), r11);

  // Set up the elements pointer in the allocated arguments object.
  // If we allocated a parameter map, rdi will point there, otherwise to the
  // backing store.
  __ leap(rdi, Operand(rax, JSSloppyArgumentsObject::kSize));
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rdi);

  // rax = address of new object (tagged)
  // rbx = mapped parameter count (untagged)
  // r11 = argument count (tagged)
  // rdi = address of parameter map or backing store (tagged)

  // Initialize the parameter map. If there are no mapped arguments, we're
  // done.
  Label skip_parameter_map;
  __ testp(rbx, rbx);
  __ j(zero, &skip_parameter_map);

  __ LoadRoot(kScratchRegister, Heap::kSloppyArgumentsElementsMapRootIndex);
  // rbx contains the untagged mapped parameter count. Add 2 and tag to write
  // the parameter map length.
  __ movp(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
  __ Integer64PlusConstantToSmi(r9, rbx, 2);
  __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), r9);
  __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 0 * kPointerSize), rsi);
  __ leap(r9, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
  __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 1 * kPointerSize), r9);

  // Copy the parameter slots and the holes in the arguments.
  // We need to fill in mapped_parameter_count slots. They index the context,
  // where parameters are stored in reverse order, at
  //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS + parameter_count - 1
  // The mapped parameters thus need to get indices
  //   MIN_CONTEXT_SLOTS + parameter_count - 1 ..
  //       MIN_CONTEXT_SLOTS + parameter_count - mapped_parameter_count
  // We loop from right to left.
  Label parameters_loop, parameters_test;

  // Load tagged parameter count into r9.
  __ Integer32ToSmi(r9, rbx);
  __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS));
  __ addp(r8, rcx);
  __ subp(r8, r9);
  __ movp(rcx, rdi);
  __ leap(rdi, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
  __ SmiToInteger64(r9, r9);
  // r9  = loop variable (untagged)
  // r8  = mapping index (tagged)
  // rcx = address of parameter map (tagged)
  // rdi = address of backing store (tagged)
  __ jmp(&parameters_test, Label::kNear);

  __ bind(&parameters_loop);
  __ subp(r9, Immediate(1));
  __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
  __ movp(FieldOperand(rcx, r9, times_pointer_size, kParameterMapHeaderSize),
          r8);
  __ movp(FieldOperand(rdi, r9, times_pointer_size, FixedArray::kHeaderSize),
          kScratchRegister);
  __ SmiAddConstant(r8, r8, Smi::FromInt(1));
  __ bind(&parameters_test);
  __ testp(r9, r9);
  __ j(not_zero, &parameters_loop, Label::kNear);

  __ bind(&skip_parameter_map);

  // r11 = argument count (tagged)
  // rdi = address of backing store (tagged)
  // Copy the arguments header and the remaining slots (if there are any).
  __ Move(FieldOperand(rdi, FixedArray::kMapOffset),
          factory->fixed_array_map());
  __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), r11);

  Label arguments_loop, arguments_test;
  __ movp(r8, rbx);
  // Untag r11 for the loop below.
  __ SmiToInteger64(r11, r11);
  __ leap(kScratchRegister, Operand(r8, times_pointer_size, 0));
  __ subp(rdx, kScratchRegister);
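  // Skip over the mapped parameters: rdx now points one slot above the
  // first unmapped argument on the stack.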
  __ jmp(&arguments_test, Label::kNear);

  __ bind(&arguments_loop);
  __ subp(rdx, Immediate(kPointerSize));
  __ movp(r9, Operand(rdx, 0));
  __ movp(FieldOperand(rdi, r8, times_pointer_size, FixedArray::kHeaderSize),
          r9);
  __ addp(r8, Immediate(1));

  __ bind(&arguments_test);
  __ cmpp(r8, r11);
  __ j(less, &arguments_loop, Label::kNear);

  // Return.
  __ ret(0);

  // Do the runtime call to allocate the arguments object.
  // r11 = argument count (untagged)
  __ bind(&runtime);
  __ Integer32ToSmi(r11, r11);
  __ PopReturnAddressTo(rax);
  __ Push(rdi);  // Push function.
  __ Push(rdx);  // Push parameters pointer.
  __ Push(r11);  // Push parameter count.
  __ PushReturnAddressFrom(rax);
  __ TailCallRuntime(Runtime::kNewSloppyArguments);
}


void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdi    : function
  //  -- rsi    : context
  //  -- rbp    : frame pointer
  //  -- rsp[0] : return address
  // -----------------------------------
  __ AssertFunction(rdi);

  // For Ignition we need to skip all possible handler/stub frames until
  // we reach the JavaScript frame for the function (similar to what the
  // runtime fallback implementation does). So make rdx point to that
  // JavaScript frame.
  {
    Label loop, loop_entry;
    __ movp(rdx, rbp);
    __ jmp(&loop_entry, Label::kNear);
    __ bind(&loop);
    __ movp(rdx, Operand(rdx, StandardFrameConstants::kCallerFPOffset));
    __ bind(&loop_entry);
    __ cmpp(rdi, Operand(rdx, StandardFrameConstants::kFunctionOffset));
    __ j(not_equal, &loop);
  }

  // Check if we have an arguments adaptor frame below the function frame.
  Label arguments_adaptor, arguments_done;
  __ movp(rbx, Operand(rdx, StandardFrameConstants::kCallerFPOffset));
  __ Cmp(Operand(rbx, CommonFrameConstants::kContextOrFrameTypeOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &arguments_adaptor, Label::kNear);
  {
    __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ LoadSharedFunctionInfoSpecialField(
        rax, rax, SharedFunctionInfo::kFormalParameterCountOffset);
    __ leap(rbx, Operand(rdx, rax, times_pointer_size,
                         StandardFrameConstants::kCallerSPOffset -
                             1 * kPointerSize));
  }
  __ jmp(&arguments_done, Label::kNear);
  __ bind(&arguments_adaptor);
  {
    __ SmiToInteger32(
        rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
    __ leap(rbx, Operand(rbx, rax, times_pointer_size,
                         StandardFrameConstants::kCallerSPOffset -
                             1 * kPointerSize));
  }
  __ bind(&arguments_done);

  // ----------- S t a t e -------------
  //  -- rax    : number of arguments
  //  -- rbx    : pointer to the first argument
  //  -- rsi    : context
  //  -- rsp[0] : return address
  // -----------------------------------

  // Allocate space for the strict arguments object plus the backing store.
  Label allocate, done_allocate;
  __ leal(rcx, Operand(rax, times_pointer_size,
                       JSStrictArgumentsObject::kSize +
                           FixedArray::kHeaderSize));
  __ Allocate(rcx, rdx, rdi, no_reg, &allocate, TAG_OBJECT);
  __ bind(&done_allocate);

  // Compute the arguments.length in rdi.
  __ Integer32ToSmi(rdi, rax);

  // Set up the elements array in rdx.
  __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
  __ movp(FieldOperand(rdx, FixedArray::kMapOffset), rcx);
  __ movp(FieldOperand(rdx, FixedArray::kLengthOffset), rdi);
  {
    Label loop, done_loop;
    __ Set(rcx, 0);
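    // Copy the arguments: rbx walks down the stack while rcx counts up
    // through the FixedArray elements.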
    __ bind(&loop);
    __ cmpl(rcx, rax);
    __ j(equal, &done_loop, Label::kNear);
    __ movp(kScratchRegister, Operand(rbx, 0 * kPointerSize));
    __ movp(
        FieldOperand(rdx, rcx, times_pointer_size, FixedArray::kHeaderSize),
        kScratchRegister);
    __ subp(rbx, Immediate(1 * kPointerSize));
    __ addl(rcx, Immediate(1));
    __ jmp(&loop);
    __ bind(&done_loop);
  }

  // Set up the strict arguments object in rax.
  __ leap(rax,
          Operand(rdx, rax, times_pointer_size, FixedArray::kHeaderSize));
  __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, rcx);
  __ movp(FieldOperand(rax, JSStrictArgumentsObject::kMapOffset), rcx);
  __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSStrictArgumentsObject::kPropertiesOffset), rcx);
  __ movp(FieldOperand(rax, JSStrictArgumentsObject::kElementsOffset), rdx);
  __ movp(FieldOperand(rax, JSStrictArgumentsObject::kLengthOffset), rdi);
  STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
  __ Ret();

  // Fall back to %AllocateInNewSpace.
  __ bind(&allocate);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Integer32ToSmi(rax, rax);
    __ Integer32ToSmi(rcx, rcx);
    __ Push(rax);
    __ Push(rbx);
    __ Push(rcx);
    __ CallRuntime(Runtime::kAllocateInNewSpace);
    __ movp(rdx, rax);
    __ Pop(rbx);
    __ Pop(rax);
    __ SmiToInteger32(rax, rax);
  }
  __ jmp(&done_allocate);
}


void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
  Register context_reg = rsi;
  Register slot_reg = rbx;
  Register result_reg = rax;
  Label slow_case;

  // Go up the context chain to the script context.
  for (int i = 0; i < depth(); ++i) {
    __ movp(rdi, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
    context_reg = rdi;
  }

  // Load the PropertyCell value at the specified slot.
  __ movp(result_reg, ContextOperand(context_reg, slot_reg));
  __ movp(result_reg, FieldOperand(result_reg, PropertyCell::kValueOffset));

  // Check that the value is not the_hole.
  __ CompareRoot(result_reg, Heap::kTheHoleValueRootIndex);
  __ j(equal, &slow_case, Label::kNear);
  __ Ret();

  // Fall back to the runtime.
  __ bind(&slow_case);
  __ Integer32ToSmi(slot_reg, slot_reg);
  __ PopReturnAddressTo(kScratchRegister);
  __ Push(slot_reg);
  __ Push(kScratchRegister);
  __ TailCallRuntime(Runtime::kLoadGlobalViaContext);
}


void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
  Register context_reg = rsi;
  Register slot_reg = rbx;
  Register value_reg = rax;
  Register cell_reg = r8;
  Register cell_details_reg = rdx;
  Register cell_value_reg = r9;
  Label fast_heapobject_case, fast_smi_case, slow_case;

  if (FLAG_debug_code) {
    __ CompareRoot(value_reg, Heap::kTheHoleValueRootIndex);
    __ Check(not_equal, kUnexpectedValue);
  }

  // Go up the context chain to the script context.
  for (int i = 0; i < depth(); ++i) {
    __ movp(rdi, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
    context_reg = rdi;
  }

  // Load the PropertyCell at the specified slot.
  __ movp(cell_reg, ContextOperand(context_reg, slot_reg));

  // Load PropertyDetails for the cell (actually only the cell_type, kind and
  // READ_ONLY bit of the attributes).
  __ SmiToInteger32(cell_details_reg,
                    FieldOperand(cell_reg, PropertyCell::kDetailsOffset));
  __ andl(cell_details_reg,
          Immediate(PropertyDetails::PropertyCellTypeField::kMask |
                    PropertyDetails::KindField::kMask |
                    PropertyDetails::kAttributesReadOnlyMask));

  // Check if the PropertyCell holds mutable data.
  Label not_mutable_data;
  __ cmpl(cell_details_reg,
          Immediate(PropertyDetails::PropertyCellTypeField::encode(
                        PropertyCellType::kMutable) |
                    PropertyDetails::KindField::encode(kData)));
  __ j(not_equal, &not_mutable_data);
  __ JumpIfSmi(value_reg, &fast_smi_case);
  __ bind(&fast_heapobject_case);
  __ movp(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
  __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg,
                      cell_value_reg, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  // RecordWriteField clobbers the value register, so we need to reload.
  __ movp(value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
  __ Ret();
  __ bind(&not_mutable_data);

  // Check if the PropertyCell value matches the new value (relevant for
  // Constant, ConstantType and Undefined cells).
  Label not_same_value;
  __ movp(cell_value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
  __ cmpp(cell_value_reg, value_reg);
  __ j(not_equal, &not_same_value,
       FLAG_debug_code ? Label::kFar : Label::kNear);
  // Make sure the PropertyCell is not marked READ_ONLY.
  __ testl(cell_details_reg,
           Immediate(PropertyDetails::kAttributesReadOnlyMask));
  __ j(not_zero, &slow_case);
  if (FLAG_debug_code) {
    Label done;
    // This can only be true for Constant, ConstantType and Undefined cells,
    // because we never store the_hole via this stub.
    __ cmpl(cell_details_reg,
            Immediate(PropertyDetails::PropertyCellTypeField::encode(
                          PropertyCellType::kConstant) |
                      PropertyDetails::KindField::encode(kData)));
    __ j(equal, &done);
    __ cmpl(cell_details_reg,
            Immediate(PropertyDetails::PropertyCellTypeField::encode(
                          PropertyCellType::kConstantType) |
                      PropertyDetails::KindField::encode(kData)));
    __ j(equal, &done);
    __ cmpl(cell_details_reg,
            Immediate(PropertyDetails::PropertyCellTypeField::encode(
                          PropertyCellType::kUndefined) |
                      PropertyDetails::KindField::encode(kData)));
    __ Check(equal, kUnexpectedValue);
    __ bind(&done);
  }
  __ Ret();
  __ bind(&not_same_value);

  // Check if the PropertyCell contains data with constant type (and is not
  // READ_ONLY).
  __ cmpl(cell_details_reg,
          Immediate(PropertyDetails::PropertyCellTypeField::encode(
                        PropertyCellType::kConstantType) |
                    PropertyDetails::KindField::encode(kData)));
  __ j(not_equal, &slow_case, Label::kNear);

  // Now either both the old and the new value must be SMIs, or both must be
  // heap objects with the same map.
  Label value_is_heap_object;
  __ JumpIfNotSmi(value_reg, &value_is_heap_object, Label::kNear);
  __ JumpIfNotSmi(cell_value_reg, &slow_case, Label::kNear);
  // Old and new values are SMIs, no need for a write barrier here.
  __ bind(&fast_smi_case);
  __ movp(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
  __ Ret();
  __ bind(&value_is_heap_object);
  __ JumpIfSmi(cell_value_reg, &slow_case, Label::kNear);
  Register cell_value_map_reg = cell_value_reg;
  __ movp(cell_value_map_reg,
          FieldOperand(cell_value_reg, HeapObject::kMapOffset));
  __ cmpp(cell_value_map_reg, FieldOperand(value_reg, HeapObject::kMapOffset));
  __ j(equal, &fast_heapobject_case);

  // Fall back to the runtime.
  __ bind(&slow_case);
  __ Integer32ToSmi(slot_reg, slot_reg);
  __ PopReturnAddressTo(kScratchRegister);
  __ Push(slot_reg);
  __ Push(value_reg);
  __ Push(kScratchRegister);
  __ TailCallRuntime(is_strict(language_mode())
                         ? Runtime::kStoreGlobalViaContext_Strict
                         : Runtime::kStoreGlobalViaContext_Sloppy);
}


static int Offset(ExternalReference ref0, ExternalReference ref1) {
  int64_t offset = (ref0.address() - ref1.address());
  // Check that the offset fits into an int.
  DCHECK(static_cast<int>(offset) == offset);
  return static_cast<int>(offset);
}


// Prepares the stack to put arguments (aligns and so on). The WIN64 calling
// convention requires putting the pointer to the return value slot into
// rcx (rcx must be preserved until CallApiFunctionAndReturn). Saves the
// context (rsi). Clobbers rax. Allocates arg_stack_space * kPointerSize
// inside the exit frame (not GCed), accessible via StackSpaceOperand.
static void PrepareCallApiFunction(MacroAssembler* masm, int arg_stack_space) {
  __ EnterApiExitFrame(arg_stack_space);
}


// Calls an API function. Allocates a HandleScope, extracts the returned value
// from the handle, and propagates exceptions. Clobbers r14, r15, rbx and
// caller-save registers. Restores context. On return removes
// stack_space * kPointerSize (GCed).
static void CallApiFunctionAndReturn(MacroAssembler* masm,
                                     Register function_address,
                                     ExternalReference thunk_ref,
                                     Register thunk_last_arg, int stack_space,
                                     Operand* stack_space_operand,
                                     Operand return_value_operand,
                                     Operand* context_restore_operand) {
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  Label write_back;

  Isolate* isolate = masm->isolate();
  Factory* factory = isolate->factory();
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate);
  const int kNextOffset = 0;
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(isolate), next_address);
  const int kLevelOffset = Offset(
      ExternalReference::handle_scope_level_address(isolate), next_address);
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate);

  DCHECK(rdx.is(function_address) || r8.is(function_address));
  // Allocate HandleScope in callee-save registers.
  Register prev_next_address_reg = r14;
  Register prev_limit_reg = rbx;
  Register base_reg = r15;
  __ Move(base_reg, next_address);
  __ movp(prev_next_address_reg, Operand(base_reg, kNextOffset));
  __ movp(prev_limit_reg, Operand(base_reg, kLimitOffset));
  __ addl(Operand(base_reg, kLevelOffset), Immediate(1));
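  // The level stays raised across the call; it is lowered again below once
  // the result has been fetched from the handle scope.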

  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1);
    __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
    __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
                     1);
    __ PopSafepointRegisters();
  }

  Label profiler_disabled;
  Label end_profiler_check;
  __ Move(rax, ExternalReference::is_profiling_address(isolate));
  __ cmpb(Operand(rax, 0), Immediate(0));
  __ j(zero, &profiler_disabled);

  // Third parameter is the address of the actual callback function.
  __ Move(thunk_last_arg, function_address);
  __ Move(rax, thunk_ref);
  __ jmp(&end_profiler_check);

  __ bind(&profiler_disabled);
  // The profiler is disabled; call the function address directly.
  __ Move(rax, function_address);

  __ bind(&end_profiler_check);

  // Call the API function!
  __ call(rax);

  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1);
    __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
    __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
                     1);
    __ PopSafepointRegisters();
  }

  // Load the value from ReturnValue.
  __ movp(rax, return_value_operand);
  __ bind(&prologue);

  // No more valid handles (the result handle was the last one). Restore
  // the previous handle scope.
  __ subl(Operand(base_reg, kLevelOffset), Immediate(1));
  __ movp(Operand(base_reg, kNextOffset), prev_next_address_reg);
  __ cmpp(prev_limit_reg, Operand(base_reg, kLimitOffset));
  __ j(not_equal, &delete_allocated_handles);

  // Leave the API exit frame.
  __ bind(&leave_exit_frame);
  bool restore_context = context_restore_operand != NULL;
  if (restore_context) {
    __ movp(rsi, *context_restore_operand);
  }
  if (stack_space_operand != nullptr) {
    __ movp(rbx, *stack_space_operand);
  }
  __ LeaveApiExitFrame(!restore_context);

  // Check if the function scheduled an exception.
  __ Move(rdi, scheduled_exception_address);
  __ Cmp(Operand(rdi, 0), factory->the_hole_value());
  __ j(not_equal, &promote_scheduled_exception);

#if DEBUG
  // Check if the function returned a valid JavaScript value.
  Label ok;
  Register return_value = rax;
  Register map = rcx;

  __ JumpIfSmi(return_value, &ok, Label::kNear);
  __ movp(map, FieldOperand(return_value, HeapObject::kMapOffset));

  __ CmpInstanceType(map, LAST_NAME_TYPE);
  __ j(below_equal, &ok, Label::kNear);

  __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
  __ j(above_equal, &ok, Label::kNear);

  __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, Heap::kUndefinedValueRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, Heap::kTrueValueRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, Heap::kFalseValueRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, Heap::kNullValueRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ Abort(kAPICallReturnedInvalidObject);

  __ bind(&ok);
#endif

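  // When the stack space is only known at runtime, rbx was loaded from
  // *stack_space_operand above, before leaving the exit frame.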
  if (stack_space_operand != nullptr) {
    DCHECK_EQ(stack_space, 0);
    __ PopReturnAddressTo(rcx);
    __ addq(rsp, rbx);
    __ jmp(rcx);
  } else {
    __ ret(stack_space * kPointerSize);
  }

  // Re-throw by promoting a scheduled exception.
  __ bind(&promote_scheduled_exception);
  __ TailCallRuntime(Runtime::kPromoteScheduledException);

  // HandleScope limit has changed. Delete allocated extensions.
  __ bind(&delete_allocated_handles);
  __ movp(Operand(base_reg, kLimitOffset), prev_limit_reg);
  __ movp(prev_limit_reg, rax);
  __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
  __ LoadAddress(rax,
                 ExternalReference::delete_handle_scope_extensions(isolate));
  __ call(rax);
  __ movp(rax, prev_limit_reg);
  __ jmp(&leave_exit_frame);
}

void CallApiCallbackStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdi                 : callee
  //  -- rbx                 : call_data
  //  -- rcx                 : holder
  //  -- rdx                 : api_function_address
  //  -- rsi                 : context
  //  -- rax                 : number of arguments if argc is a register
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[argc * 8]       : first argument
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  Register callee = rdi;
  Register call_data = rbx;
  Register holder = rcx;
  Register api_function_address = rdx;
  Register context = rsi;
  Register return_address = r8;

  typedef FunctionCallbackArguments FCA;

  STATIC_ASSERT(FCA::kContextSaveIndex == 6);
  STATIC_ASSERT(FCA::kCalleeIndex == 5);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kHolderIndex == 0);
  STATIC_ASSERT(FCA::kArgsLength == 7);

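  // The seven values are pushed below in reverse index order, so the FCA
  // indices asserted above match their final slots on the stack.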
  __ PopReturnAddressTo(return_address);

  // context save
  __ Push(context);

  // callee
  __ Push(callee);

  // call data
  __ Push(call_data);
  Register scratch = call_data;
  if (!this->call_data_undefined()) {
    __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  }
  // return value
  __ Push(scratch);
  // return value default
  __ Push(scratch);
  // isolate
  __ Move(scratch, ExternalReference::isolate_address(masm->isolate()));
  __ Push(scratch);
  // holder
  __ Push(holder);

  __ movp(scratch, rsp);
  // Push the return address back on the stack.
  __ PushReturnAddressFrom(return_address);

  if (!this->is_lazy()) {
    // Load the context from the callee.
    __ movp(context, FieldOperand(callee, JSFunction::kContextOffset));
  }

  // Allocate the FunctionCallbackInfo structure in the arguments' space,
  // since it's not controlled by GC.
  const int kApiStackSpace = 4;

  PrepareCallApiFunction(masm, kApiStackSpace);

  // FunctionCallbackInfo::implicit_args_.
  int argc = this->argc();
  __ movp(StackSpaceOperand(0), scratch);
  __ addp(scratch, Immediate((argc + FCA::kArgsLength - 1) * kPointerSize));
  // FunctionCallbackInfo::values_.
  __ movp(StackSpaceOperand(1), scratch);
  // FunctionCallbackInfo::length_.
  __ Set(StackSpaceOperand(2), argc);
  // FunctionCallbackInfo::is_construct_call_.
  __ Set(StackSpaceOperand(3), 0);
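  // This stub never performs construct calls, so is_construct_call_ is
  // hard-coded to zero.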

#if defined(__MINGW64__) || defined(_WIN64)
  Register arguments_arg = rcx;
  Register callback_arg = rdx;
#else
  Register arguments_arg = rdi;
  Register callback_arg = rsi;
#endif

  // It's okay if api_function_address == callback_arg,
  // but not if it == arguments_arg.
  DCHECK(!api_function_address.is(arguments_arg));

  // v8::InvocationCallback's argument.
  __ leap(arguments_arg, StackSpaceOperand(0));

  ExternalReference thunk_ref =
      ExternalReference::invoke_function_callback(masm->isolate());

  // Accessor for FunctionCallbackInfo and the first JS argument.
  StackArgumentsAccessor args_from_rbp(rbp, FCA::kArgsLength + 1,
                                       ARGUMENTS_DONT_CONTAIN_RECEIVER);
  Operand context_restore_operand = args_from_rbp.GetArgumentOperand(
      FCA::kArgsLength - FCA::kContextSaveIndex);
  Operand return_value_operand = args_from_rbp.GetArgumentOperand(
      this->is_store() ? 0 : FCA::kArgsLength - FCA::kReturnValueOffset);
  // The argument count is known at compile time here, so the stack space to
  // drop can be passed as an immediate instead of via is_construct_call_.
  const int stack_space = argc + FCA::kArgsLength + 1;
  Operand* stack_space_operand = nullptr;
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, callback_arg,
                           stack_space, stack_space_operand,
                           return_value_operand, &context_restore_operand);
}


void CallApiGetterStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rsp[0]                          : return address
  //  -- rsp[8]                          : name
  //  -- rsp[16 .. (16 + kArgsLength*8)] : v8::PropertyCallbackInfo::args_
  //  -- ...
  //  -- r8                              : api_function_address
  // -----------------------------------

#if defined(__MINGW64__) || defined(_WIN64)
  Register getter_arg = r8;
  Register accessor_info_arg = rdx;
  Register name_arg = rcx;
#else
  Register getter_arg = rdx;
  Register accessor_info_arg = rsi;
  Register name_arg = rdi;
#endif
  Register api_function_address = ApiGetterDescriptor::function_address();
  DCHECK(api_function_address.is(r8));
  Register scratch = rax;

  // v8::PropertyCallbackInfo::args_ array and name handle.
  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;

  // Allocate the v8::PropertyCallbackInfo in non-GCed stack space.
  const int kArgStackSpace = 1;

  // Load the address of the v8::PropertyCallbackInfo::args_ array.
  __ leap(scratch, Operand(rsp, 2 * kPointerSize));
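  // args_ starts two slots up the stack, past the return address and the
  // name handle.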

  PrepareCallApiFunction(masm, kArgStackSpace);
  // Create the v8::PropertyCallbackInfo object on the stack and initialize
  // its args_ field.
  Operand info_object = StackSpaceOperand(0);
  __ movp(info_object, scratch);

  __ leap(name_arg, Operand(scratch, -kPointerSize));
  // The context register (rsi) has been saved in PrepareCallApiFunction and
  // could be used to pass arguments.
  __ leap(accessor_info_arg, info_object);

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback(isolate());

  // It's okay if api_function_address == getter_arg,
  // but not if it == accessor_info_arg or name_arg.
  DCHECK(!api_function_address.is(accessor_info_arg) &&
         !api_function_address.is(name_arg));

  // +3 is to skip the prologue, the return address and the name handle.
  Operand return_value_operand(
      rbp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg,
                           kStackUnwindSpace, nullptr, return_value_operand,
                           nullptr);
}


#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X64