blob: 602d3a01a926e18ea923affd85d4d832fcb9f148 [file] [log] [blame]
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001// Copyright 2013 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005#if V8_TARGET_ARCH_X64
Kristian Monsen80d68ea2010-09-08 11:05:35 +01006
Ben Murdochb8a8cc12014-11-26 15:28:44 +00007#include "src/code-stubs.h"
Ben Murdochda12d292016-06-02 14:46:10 +01008#include "src/api-arguments.h"
9#include "src/bootstrapper.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000010#include "src/codegen.h"
11#include "src/ic/handler-compiler.h"
12#include "src/ic/ic.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000013#include "src/ic/stub-cache.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000014#include "src/isolate.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000015#include "src/regexp/jsregexp.h"
16#include "src/regexp/regexp-macro-assembler.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -040017#include "src/runtime/runtime.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000018#include "src/x64/code-stubs-x64.h"
Kristian Monsen80d68ea2010-09-08 11:05:35 +010019
20namespace v8 {
21namespace internal {
22
Ben Murdochb8a8cc12014-11-26 15:28:44 +000023
24static void InitializeArrayConstructorDescriptor(
25 Isolate* isolate, CodeStubDescriptor* descriptor,
26 int constant_stack_parameter_count) {
27 Address deopt_handler = Runtime::FunctionForId(
28 Runtime::kArrayConstructor)->entry;
29
30 if (constant_stack_parameter_count == 0) {
31 descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
32 JS_FUNCTION_STUB_MODE);
33 } else {
34 descriptor->Initialize(rax, deopt_handler, constant_stack_parameter_count,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000035 JS_FUNCTION_STUB_MODE);
Ben Murdochb8a8cc12014-11-26 15:28:44 +000036 }
37}
38
39
40static void InitializeInternalArrayConstructorDescriptor(
41 Isolate* isolate, CodeStubDescriptor* descriptor,
42 int constant_stack_parameter_count) {
43 Address deopt_handler = Runtime::FunctionForId(
44 Runtime::kInternalArrayConstructor)->entry;
45
46 if (constant_stack_parameter_count == 0) {
47 descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
48 JS_FUNCTION_STUB_MODE);
49 } else {
50 descriptor->Initialize(rax, deopt_handler, constant_stack_parameter_count,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000051 JS_FUNCTION_STUB_MODE);
Ben Murdochb8a8cc12014-11-26 15:28:44 +000052 }
53}
54
55
// Descriptor for the one-argument Array constructor stub: exactly one stack
// parameter, deopting into Runtime::kArrayConstructor.
void ArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 1);
}
60
61
// Descriptor for the N-argument Array constructor stub: -1 marks a variable
// argument count (actual count passed in rax by the helper above).
void ArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, -1);
}
66
67
// Descriptor for the fast Array.prototype.push stub: variable argument
// count (-1) in rax, falling back to Runtime::kArrayPush on miss.
void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
  Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
  descriptor->Initialize(rax, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}
Ben Murdochb8a8cc12014-11-26 15:28:44 +000072
// Descriptor for the one-argument internal-array constructor stub.
void InternalArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 1);
}
77
78
// Descriptor for the N-argument internal-array constructor stub: -1 marks a
// variable argument count.
void InternalArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, -1);
}
83
84
Kristian Monsen80d68ea2010-09-08 11:05:35 +010085#define __ ACCESS_MASM(masm)
Steve Block1e0659c2011-05-24 12:43:12 +010086
Ben Murdochb8a8cc12014-11-26 15:28:44 +000087
// Emits the generic "miss" path for a Hydrogen code stub: pushes the stub's
// register parameters (as described by its call interface descriptor) and
// calls the given runtime miss handler inside a fresh internal frame.
// The result of the runtime call is returned to the stub's caller.
void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
                                               ExternalReference miss) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
  int param_count = descriptor.GetRegisterParameterCount();
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    // The last register parameter is expected to be rax (the conventional
    // return/accumulator register) unless there are no parameters at all.
    DCHECK(param_count == 0 ||
           rax.is(descriptor.GetRegisterParameter(param_count - 1)));
    // Push arguments in descriptor order so the runtime sees them on the
    // stack.
    for (int i = 0; i < param_count; ++i) {
      __ Push(descriptor.GetRegisterParameter(i));
    }
    __ CallExternalReference(miss, param_count);
  }

  __ Ret();
}
109
110
// Emits the store-buffer-overflow handler: saves all caller-saved registers
// (optionally including the XMM registers, per save_doubles()), calls the
// C function that drains the store buffer, restores the registers, and
// returns.
void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  __ PushCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs);
  const int argument_count = 1;
  __ PrepareCallCFunction(argument_count);
  // Single argument: the isolate address.
  __ LoadAddress(arg_reg_1,
                 ExternalReference::isolate_address(isolate()));

  // The C call cannot trigger GC, so no safepoint bookkeeping is needed.
  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  __ PopCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs);
  __ ret(0);
}
125
126
// Static-only helper namespace for SSE2 floating-point operand loading used
// by the stubs below.
class FloatingPointHelper : public AllStatic {
 public:
  enum ConvertUndefined {
    CONVERT_UNDEFINED_TO_ZERO,
    BAILOUT_ON_UNDEFINED
  };
  // Load the operands from rdx and rax into xmm0 and xmm1, as doubles.
  // If the operands are not both numbers, jump to not_numbers.
  // Leaves rdx and rax unchanged. SmiOperands assumes both are smis.
  // NumberOperands assumes both are smis or heap numbers.
  static void LoadSSE2UnknownOperands(MacroAssembler* masm,
                                      Label* not_numbers);
};
140
141
// Emits a truncating double-to-int32 conversion: reads an IEEE-754 double
// from memory at source()+offset() and leaves the truncated 32-bit result
// in destination(). Uses a manual mantissa-shift for values whose result
// fits entirely in the low mantissa word, and cvttsd2siq for the rest.
void DoubleToIStub::Generate(MacroAssembler* masm) {
  Register input_reg = this->source();
  Register final_result_reg = this->destination();
  DCHECK(is_truncating());

  Label check_negative, process_64_bits, done;

  int double_offset = offset();

  // Account for return address and saved regs if input is rsp.
  if (input_reg.is(rsp)) double_offset += 3 * kRegisterSize;

  // The double is addressed as two 32-bit halves: mantissa (low word) and
  // exponent/sign (high word).
  MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
  MemOperand exponent_operand(MemOperand(input_reg,
                                         double_offset + kDoubleSize / 2));

  // Pick the first candidate register that aliases neither the input nor
  // the requested result register.
  Register scratch1;
  Register scratch_candidates[3] = { rbx, rdx, rdi };
  for (int i = 0; i < 3; i++) {
    scratch1 = scratch_candidates[i];
    if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
  }

  // Since we must use rcx for shifts below, use some other register (rax)
  // to calculate the result if ecx is the requested return register.
  Register result_reg = final_result_reg.is(rcx) ? rax : final_result_reg;
  // Save ecx if it isn't the return register and therefore volatile, or if it
  // is the return register, then save the temp register we use in its stead
  // for the result.
  Register save_reg = final_result_reg.is(rcx) ? rax : rcx;
  __ pushq(scratch1);
  __ pushq(save_reg);

  // If the input is not relative to rsp, the pushes above did not move it,
  // so a copy of the exponent word can be stashed on the stack.
  bool stash_exponent_copy = !input_reg.is(rsp);
  __ movl(scratch1, mantissa_operand);
  __ Movsd(xmm0, mantissa_operand);
  __ movl(rcx, exponent_operand);
  if (stash_exponent_copy) __ pushq(rcx);

  // Extract the biased exponent and compare against the mantissa width to
  // decide which conversion path applies.
  __ andl(rcx, Immediate(HeapNumber::kExponentMask));
  __ shrl(rcx, Immediate(HeapNumber::kExponentShift));
  __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias));
  __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits);

  // Result is entirely in lower 32-bits of mantissa
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  __ subl(rcx, Immediate(delta));
  __ xorl(result_reg, result_reg);
  __ cmpl(rcx, Immediate(31));
  // Shifts of 32 or more produce 0 (already in result_reg).
  __ j(above, &done);
  __ shll_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  // Exponent small enough that hardware truncation is exact.
  __ Cvttsd2siq(result_reg, xmm0);
  __ jmp(&done, Label::kNear);

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ movl(result_reg, scratch1);
  __ negl(result_reg);
  if (stash_exponent_copy) {
    // Sign lives in the stashed exponent word on the stack.
    __ cmpl(MemOperand(rsp, 0), Immediate(0));
  } else {
    __ cmpl(exponent_operand, Immediate(0));
  }
  // Keep the positive (unnegated) value when the sign bit was clear.
  __ cmovl(greater, result_reg, scratch1);

  // Restore registers
  __ bind(&done);
  if (stash_exponent_copy) {
    __ addp(rsp, Immediate(kDoubleSize));
  }
  if (!final_result_reg.is(result_reg)) {
    DCHECK(final_result_reg.is(rcx));
    __ movl(final_result_reg, result_reg);
  }
  __ popq(save_reg);
  __ popq(scratch1);
  __ ret(0);
}
224
225
// Loads the values in rdx and rax — each either a smi or a heap number —
// into xmm0 and xmm1 respectively as doubles. Jumps to not_numbers if
// either operand is neither. rdx and rax themselves are left unchanged.
// NOTE(review): label load_float_rax is declared but never bound or jumped
// to in this function.
void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm,
                                                  Label* not_numbers) {
  Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done;
  // Load operand in rdx into xmm0, or branch to not_numbers.
  __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex);  // Cached heap-number map.
  __ JumpIfSmi(rdx, &load_smi_rdx);
  __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);  // Argument in rdx is not a number.
  __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  // Load operand in rax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(rax, &load_smi_rax);

  __ bind(&load_nonsmi_rax);
  __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);
  __ Movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi_rdx);
  // Untag the smi and convert the 32-bit integer to a double.
  __ SmiToInteger32(kScratchRegister, rdx);
  __ Cvtlsi2sd(xmm0, kScratchRegister);
  __ JumpIfNotSmi(rax, &load_nonsmi_rax);

  __ bind(&load_smi_rax);
  __ SmiToInteger32(kScratchRegister, rax);
  __ Cvtlsi2sd(xmm1, kScratchRegister);
  __ bind(&done);
}
254
255
// Emits Math.pow. Depending on exponent_type() the inputs arrive tagged on
// the stack (ON_STACK), tagged in registers (TAGGED), or as an untagged
// integer (INTEGER). Fast paths: +/-0.5 via sqrtsd (ON_STACK only, with the
// ECMA -Infinity special cases), x87 fyl2x/f2xm1/fscale for general double
// exponents, and square-and-multiply for integer exponents. Falls back to
// the runtime / C library on any exceptional condition.
void MathPowStub::Generate(MacroAssembler* masm) {
  const Register exponent = MathPowTaggedDescriptor::exponent();
  DCHECK(exponent.is(rdx));
  const Register base = rax;
  const Register scratch = rcx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ movp(scratch, Immediate(1));
  __ Cvtlsi2sd(double_result, scratch);

  if (exponent_type() == ON_STACK) {
    Label base_is_smi, unpack_exponent;
    // The exponent and base are supplied as arguments on the stack.
    // This can only happen if the stub is called from non-optimized code.
    // Load input parameters from stack.
    StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movp(base, args.GetArgumentOperand(0));
    __ movp(exponent, args.GetArgumentOperand(1));
    __ JumpIfSmi(base, &base_is_smi, Label::kNear);
    // Non-smi, non-heap-number base goes to the runtime.
    __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &call_runtime);

    __ Movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset));
    __ jmp(&unpack_exponent, Label::kNear);

    __ bind(&base_is_smi);
    __ SmiToInteger32(base, base);
    __ Cvtlsi2sd(double_base, base);
    __ bind(&unpack_exponent);

    // Smi exponents take the integer fast path directly.
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiToInteger32(exponent, exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ CompareRoot(FieldOperand(exponent, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &call_runtime);
    __ Movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
  } else if (exponent_type() == TAGGED) {
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiToInteger32(exponent, exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ Movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type() != INTEGER) {
    Label fast_power, try_arithmetic_simplification;
    // Detect integer exponents stored as double.
    __ DoubleToI(exponent, double_exponent, double_scratch,
                 TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification,
                 &try_arithmetic_simplification,
                 &try_arithmetic_simplification);
    __ jmp(&int_exponent);

    __ bind(&try_arithmetic_simplification);
    __ Cvttsd2si(exponent, double_exponent);
    // Skip to runtime if possibly NaN (indicated by the indefinite integer).
    __ cmpl(exponent, Immediate(0x1));
    __ j(overflow, &call_runtime);

    if (exponent_type() == ON_STACK) {
      // Detect square root case. Crankshaft detects constant +/-0.5 at
      // compile time and uses DoMathPowHalf instead. We then skip this check
      // for non-constant cases of +/-0.5 as these hardly occur.
      Label continue_sqrt, continue_rsqrt, not_plus_half;
      // Test for 0.5.
      // Load double_scratch with 0.5 (its IEEE-754 bit pattern).
      __ movq(scratch, V8_UINT64_C(0x3FE0000000000000));
      __ Movq(double_scratch, scratch);
      // Already ruled out NaNs for exponent.
      __ Ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &not_plus_half, Label::kNear);

      // Calculates square root of base. Check for the special case of
      // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
      // According to IEEE-754, double-precision -Infinity has the highest
      // 12 bits set and the lowest 52 bits cleared.
      __ movq(scratch, V8_UINT64_C(0xFFF0000000000000));
      __ Movq(double_scratch, scratch);
      __ Ucomisd(double_scratch, double_base);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal. However, it also sets the carry flag.
      __ j(not_equal, &continue_sqrt, Label::kNear);
      __ j(carry, &continue_sqrt, Label::kNear);

      // Set result to Infinity in the special case (0 - (-Infinity)).
      __ Xorpd(double_result, double_result);
      __ Subsd(double_result, double_scratch);
      __ jmp(&done);

      __ bind(&continue_sqrt);
      // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
      __ Xorpd(double_scratch, double_scratch);
      __ Addsd(double_scratch, double_base);  // Convert -0 to 0.
      __ Sqrtsd(double_result, double_scratch);
      __ jmp(&done);

      // Test for -0.5.
      __ bind(&not_plus_half);
      // Load double_scratch with -0.5 by subtracting 1 (double_result holds
      // 1.0, double_scratch still holds 0.5).
      __ Subsd(double_scratch, double_result);
      // Already ruled out NaNs for exponent.
      __ Ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &fast_power, Label::kNear);

      // Calculates reciprocal of square root of base. Check for the special
      // case of Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
      // According to IEEE-754, double-precision -Infinity has the highest
      // 12 bits set and the lowest 52 bits cleared.
      __ movq(scratch, V8_UINT64_C(0xFFF0000000000000));
      __ Movq(double_scratch, scratch);
      __ Ucomisd(double_scratch, double_base);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal. However, it also sets the carry flag.
      __ j(not_equal, &continue_rsqrt, Label::kNear);
      __ j(carry, &continue_rsqrt, Label::kNear);

      // Set result to 0 in the special case.
      __ Xorpd(double_result, double_result);
      __ jmp(&done);

      __ bind(&continue_rsqrt);
      // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
      __ Xorpd(double_exponent, double_exponent);
      __ Addsd(double_exponent, double_base);  // Convert -0 to +0.
      __ Sqrtsd(double_exponent, double_exponent);
      __ Divsd(double_result, double_exponent);
      __ jmp(&done);
    }

    // Using FPU instructions to calculate power.
    Label fast_power_failed;
    __ bind(&fast_power);
    __ fnclex();  // Clear flags to catch exceptions later.
    // Transfer (B)ase and (E)xponent onto the FPU register stack.
    __ subp(rsp, Immediate(kDoubleSize));
    __ Movsd(Operand(rsp, 0), double_exponent);
    __ fld_d(Operand(rsp, 0));  // E
    __ Movsd(Operand(rsp, 0), double_base);
    __ fld_d(Operand(rsp, 0));  // B, E

    // Exponent is in st(1) and base is in st(0)
    // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
    // FYL2X calculates st(1) * log2(st(0))
    __ fyl2x();    // X
    __ fld(0);     // X, X
    __ frndint();  // rnd(X), X
    __ fsub(1);    // rnd(X), X-rnd(X)
    __ fxch(1);    // X - rnd(X), rnd(X)
    // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
    __ f2xm1();   // 2^(X-rnd(X)) - 1, rnd(X)
    __ fld1();    // 1, 2^(X-rnd(X)) - 1, rnd(X)
    __ faddp(1);  // 2^(X-rnd(X)), rnd(X)
    // FSCALE calculates st(0) * 2^st(1)
    __ fscale();  // 2^X, rnd(X)
    __ fstp(1);
    // Bail out to runtime in case of exceptions in the status word.
    __ fnstsw_ax();
    __ testb(rax, Immediate(0x5F));  // Check for all but precision exception.
    __ j(not_zero, &fast_power_failed, Label::kNear);
    __ fstp_d(Operand(rsp, 0));
    __ Movsd(double_result, Operand(rsp, 0));
    __ addp(rsp, Immediate(kDoubleSize));
    __ jmp(&done);

    __ bind(&fast_power_failed);
    __ fninit();  // Reset the FPU before leaving the fast path.
    __ addp(rsp, Immediate(kDoubleSize));
    __ jmp(&call_runtime);
  }

  // Calculate power with integer exponent (square-and-multiply).
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  // Back up exponent as we need to check if exponent is negative later.
  __ movp(scratch, exponent);  // Back up exponent.
  __ Movsd(double_scratch, double_base);  // Back up base.
  __ Movsd(double_scratch2, double_result);  // Load double_exponent with 1.

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ testl(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ negl(scratch);
  __ bind(&no_neg);

  __ j(zero, &while_false, Label::kNear);
  __ shrl(scratch, Immediate(1));
  // Above condition means CF==0 && ZF==0. This means that the
  // bit that has been shifted out is 0 and the result is not 0.
  __ j(above, &while_true, Label::kNear);
  __ Movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ shrl(scratch, Immediate(1));
  __ Mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ Mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);

  __ bind(&while_false);
  // If the exponent is negative, return 1/result.
  __ testl(exponent, exponent);
  __ j(greater, &done);
  __ Divsd(double_scratch2, double_result);
  __ Movsd(double_result, double_scratch2);
  // Test whether result is zero. Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ Xorpd(double_scratch2, double_scratch2);
  __ Ucomisd(double_scratch2, double_result);
  // double_exponent aliased as double_scratch2 has already been overwritten
  // and may not have contained the exponent value in the first place when the
  // input was a smi. We reset it with exponent value before bailing out.
  __ j(not_equal, &done);
  __ Cvtlsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  if (exponent_type() == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMathPowRT);

    // The stub is called from non-optimized code, which expects the result
    // as heap number in rax.
    __ bind(&done);
    __ AllocateHeapNumber(rax, rcx, &call_runtime);
    __ Movsd(FieldOperand(rax, HeapNumber::kValueOffset), double_result);
    __ ret(2 * kPointerSize);
  } else {
    __ bind(&call_runtime);
    // Move base to the correct argument register. Exponent is already in xmm1.
    __ Movsd(xmm0, double_base);
    DCHECK(double_exponent.is(xmm1));
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(2);
      __ CallCFunction(
          ExternalReference::power_double_double_function(isolate()), 2);
    }
    // Return value is in xmm0.
    __ Movsd(double_result, xmm0);

    __ bind(&done);
    __ ret(0);
  }
}
513
514
// Emits the fast path that loads a function's "prototype" property from the
// receiver; any failure falls through to the LOAD_IC miss builtin.
void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  Label miss;
  Register receiver = LoadDescriptor::ReceiverRegister();
  // Ensure that the vector and slot registers won't be clobbered before
  // calling the miss handler.
  DCHECK(!AreAliased(r8, r9, LoadWithVectorDescriptor::VectorRegister(),
                     LoadDescriptor::SlotRegister()));

  // r8/r9 are scratch registers for the prototype load.
  NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, r8,
                                                          r9, &miss);
  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
}
529
530
// Emits an indexed character load on a string receiver (string[i]) using
// StringCharAtGenerator; every miss condition (not a string, index not a
// number, index out of range) tail-calls the KEYED_LOAD_IC miss builtin.
void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
  // Return address is on the stack.
  Label miss;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register index = LoadDescriptor::NameRegister();
  Register scratch = rdi;
  Register result = rax;
  DCHECK(!scratch.is(receiver) && !scratch.is(index));
  DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()) &&
         result.is(LoadDescriptor::SlotRegister()));

  // StringCharAtGenerator doesn't use the result register until it's passed
  // the different miss possibilities. If it did, we would have a conflict
  // when FLAG_vector_ics is true.
  StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX,
                                          RECEIVER_IS_STRING);
  char_at_generator.GenerateFast(masm);
  __ ret(0);

  // Out-of-line slow cases emitted after the fast-path return.
  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper);

  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}
562
563
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100564void RegExpExecStub::Generate(MacroAssembler* masm) {
565 // Just jump directly to runtime if native RegExp is not selected at compile
566 // time or if regexp entry in generated code is turned off runtime switch or
567 // at compilation.
568#ifdef V8_INTERPRETED_REGEXP
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000569 __ TailCallRuntime(Runtime::kRegExpExec);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100570#else // V8_INTERPRETED_REGEXP
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100571
572 // Stack frame on entry.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000573 // rsp[0] : return address
574 // rsp[8] : last_match_info (expected JSArray)
575 // rsp[16] : previous index
576 // rsp[24] : subject string
577 // rsp[32] : JSRegExp object
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100578
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000579 enum RegExpExecStubArgumentIndices {
580 JS_REG_EXP_OBJECT_ARGUMENT_INDEX,
581 SUBJECT_STRING_ARGUMENT_INDEX,
582 PREVIOUS_INDEX_ARGUMENT_INDEX,
583 LAST_MATCH_INFO_ARGUMENT_INDEX,
584 REG_EXP_EXEC_ARGUMENT_COUNT
585 };
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100586
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000587 StackArgumentsAccessor args(rsp, REG_EXP_EXEC_ARGUMENT_COUNT,
588 ARGUMENTS_DONT_CONTAIN_RECEIVER);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100589 Label runtime;
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100590 // Ensure that a RegExp stack is allocated.
591 ExternalReference address_of_regexp_stack_memory_address =
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000592 ExternalReference::address_of_regexp_stack_memory_address(isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100593 ExternalReference address_of_regexp_stack_memory_size =
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000594 ExternalReference::address_of_regexp_stack_memory_size(isolate());
Steve Block44f0eee2011-05-26 01:26:41 +0100595 __ Load(kScratchRegister, address_of_regexp_stack_memory_size);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000596 __ testp(kScratchRegister, kScratchRegister);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100597 __ j(zero, &runtime);
598
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100599 // Check that the first argument is a JSRegExp object.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000600 __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100601 __ JumpIfSmi(rax, &runtime);
602 __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister);
603 __ j(not_equal, &runtime);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000604
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100605 // Check that the RegExp has been compiled (data contains a fixed array).
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000606 __ movp(rax, FieldOperand(rax, JSRegExp::kDataOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100607 if (FLAG_debug_code) {
Steve Block44f0eee2011-05-26 01:26:41 +0100608 Condition is_smi = masm->CheckSmi(rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100609 __ Check(NegateCondition(is_smi),
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000610 kUnexpectedTypeForRegExpDataFixedArrayExpected);
Steve Block44f0eee2011-05-26 01:26:41 +0100611 __ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000612 __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100613 }
614
Steve Block44f0eee2011-05-26 01:26:41 +0100615 // rax: RegExp data (FixedArray)
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100616 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
Steve Block44f0eee2011-05-26 01:26:41 +0100617 __ SmiToInteger32(rbx, FieldOperand(rax, JSRegExp::kDataTagOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100618 __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP));
619 __ j(not_equal, &runtime);
620
Steve Block44f0eee2011-05-26 01:26:41 +0100621 // rax: RegExp data (FixedArray)
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100622 // Check that the number of captures fit in the static offsets vector buffer.
623 __ SmiToInteger32(rdx,
Steve Block44f0eee2011-05-26 01:26:41 +0100624 FieldOperand(rax, JSRegExp::kIrregexpCaptureCountOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000625 // Check (number_of_captures + 1) * 2 <= offsets vector size
626 // Or number_of_captures <= offsets vector size / 2 - 1
627 STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
628 __ cmpl(rdx, Immediate(Isolate::kJSRegexpStaticOffsetsVectorSize / 2 - 1));
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100629 __ j(above, &runtime);
630
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000631 // Reset offset for possibly sliced string.
632 __ Set(r14, 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000633 __ movp(rdi, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
634 __ JumpIfSmi(rdi, &runtime);
635 __ movp(r15, rdi); // Make a copy of the original subject string.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000636 // rax: RegExp data (FixedArray)
637 // rdi: subject string
638 // r15: subject string
639 // Handle subject string according to its encoding and representation:
640 // (1) Sequential two byte? If yes, go to (9).
Ben Murdoch097c5b22016-05-18 11:27:45 +0100641 // (2) Sequential one byte? If yes, go to (5).
642 // (3) Sequential or cons? If not, go to (6).
643 // (4) Cons string. If the string is flat, replace subject with first string
644 // and go to (1). Otherwise bail out to runtime.
645 // (5) One byte sequential. Load regexp code for one byte.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000646 // (E) Carry on.
647 /// [...]
648
649 // Deferred code at the end of the stub:
Ben Murdoch097c5b22016-05-18 11:27:45 +0100650 // (6) Long external string? If not, go to (10).
651 // (7) External string. Make it, offset-wise, look like a sequential string.
652 // (8) Is the external string one byte? If yes, go to (5).
653 // (9) Two byte sequential. Load regexp code for two byte. Go to (E).
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000654 // (10) Short external string or not a string? If yes, bail out to runtime.
Ben Murdoch097c5b22016-05-18 11:27:45 +0100655 // (11) Sliced string. Replace subject with parent. Go to (1).
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000656
Ben Murdoch097c5b22016-05-18 11:27:45 +0100657 Label seq_one_byte_string /* 5 */, seq_two_byte_string /* 9 */,
658 external_string /* 7 */, check_underlying /* 1 */,
659 not_seq_nor_cons /* 6 */, check_code /* E */, not_long_external /* 10 */;
660
661 __ bind(&check_underlying);
662 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
663 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000664
665 // (1) Sequential two byte? If yes, go to (9).
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100666 __ andb(rbx, Immediate(kIsNotStringMask |
667 kStringRepresentationMask |
668 kStringEncodingMask |
669 kShortExternalStringMask));
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100670 STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000671 __ j(zero, &seq_two_byte_string); // Go to (9).
672
Ben Murdoch097c5b22016-05-18 11:27:45 +0100673 // (2) Sequential one byte? If yes, go to (5).
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000674 // Any other sequential string must be one byte.
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100675 __ andb(rbx, Immediate(kIsNotStringMask |
676 kStringRepresentationMask |
677 kShortExternalStringMask));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100678 __ j(zero, &seq_one_byte_string, Label::kNear); // Go to (5).
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100679
Ben Murdoch097c5b22016-05-18 11:27:45 +0100680 // (3) Sequential or cons? If not, go to (6).
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000681 // We check whether the subject string is a cons, since sequential strings
682 // have already been covered.
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000683 STATIC_ASSERT(kConsStringTag < kExternalStringTag);
684 STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100685 STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
686 STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000687 __ cmpp(rbx, Immediate(kExternalStringTag));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100688 __ j(greater_equal, &not_seq_nor_cons); // Go to (6).
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100689
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000690 // (4) Cons string. Check that it's flat.
691 // Replace subject with first string and reload instance type.
Steve Block44f0eee2011-05-26 01:26:41 +0100692 __ CompareRoot(FieldOperand(rdi, ConsString::kSecondOffset),
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000693 Heap::kempty_stringRootIndex);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100694 __ j(not_equal, &runtime);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000695 __ movp(rdi, FieldOperand(rdi, ConsString::kFirstOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100696 __ jmp(&check_underlying);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000697
Ben Murdoch097c5b22016-05-18 11:27:45 +0100698 // (5) One byte sequential. Load regexp code for one byte.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000699 __ bind(&seq_one_byte_string);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100700 // rax: RegExp data (FixedArray)
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000701 __ movp(r11, FieldOperand(rax, JSRegExp::kDataOneByteCodeOffset));
702 __ Set(rcx, 1); // Type is one byte.
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100703
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000704 // (E) Carry on. String handling is done.
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100705 __ bind(&check_code);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000706 // r11: irregexp code
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100707 // Check that the irregexp code has been generated for the actual string
708 // encoding. If it has, the field contains a code object otherwise it contains
Ben Murdoch257744e2011-11-30 15:57:28 +0000709 // smi (code flushing support)
710 __ JumpIfSmi(r11, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100711
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000712 // rdi: sequential subject string (or look-alike, external string)
713 // r15: original subject string
714 // rcx: encoding of subject string (1 if one_byte, 0 if two_byte);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100715 // r11: code
716 // Load used arguments before starting to push arguments for call to native
717 // RegExp code to avoid handling changing stack height.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000718 // We have to use r15 instead of rdi to load the length because rdi might
719 // have been only made to look like a sequential string when it actually
720 // is an external string.
721 __ movp(rbx, args.GetArgumentOperand(PREVIOUS_INDEX_ARGUMENT_INDEX));
722 __ JumpIfNotSmi(rbx, &runtime);
723 __ SmiCompare(rbx, FieldOperand(r15, String::kLengthOffset));
724 __ j(above_equal, &runtime);
725 __ SmiToInteger64(rbx, rbx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100726
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100727 // rdi: subject string
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100728 // rbx: previous index
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000729 // rcx: encoding of subject string (1 if one_byte 0 if two_byte);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100730 // r11: code
731 // All checks done. Now push arguments for native regexp code.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000732 Counters* counters = isolate()->counters();
Steve Block44f0eee2011-05-26 01:26:41 +0100733 __ IncrementCounter(counters->regexp_entry_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100734
Steve Block44f0eee2011-05-26 01:26:41 +0100735 // Isolates: note we add an additional parameter here (isolate pointer).
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000736 static const int kRegExpExecuteArguments = 9;
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100737 int argument_slots_on_stack =
738 masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
Steve Block44f0eee2011-05-26 01:26:41 +0100739 __ EnterApiExitFrame(argument_slots_on_stack);
740
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000741 // Argument 9: Pass current isolate address.
742 __ LoadAddress(kScratchRegister,
743 ExternalReference::isolate_address(isolate()));
744 __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kRegisterSize),
Steve Block44f0eee2011-05-26 01:26:41 +0100745 kScratchRegister);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100746
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000747 // Argument 8: Indicate that this is a direct call from JavaScript.
748 __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kRegisterSize),
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100749 Immediate(1));
750
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000751 // Argument 7: Start (high end) of backtracking stack memory area.
752 __ Move(kScratchRegister, address_of_regexp_stack_memory_address);
753 __ movp(r9, Operand(kScratchRegister, 0));
754 __ Move(kScratchRegister, address_of_regexp_stack_memory_size);
755 __ addp(r9, Operand(kScratchRegister, 0));
756 __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kRegisterSize), r9);
757
758 // Argument 6: Set the number of capture registers to zero to force global
759 // regexps to behave as non-global. This does not affect non-global regexps.
760 // Argument 6 is passed in r9 on Linux and on the stack on Windows.
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100761#ifdef _WIN64
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000762 __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kRegisterSize),
763 Immediate(0));
764#else
765 __ Set(r9, 0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100766#endif
767
768 // Argument 5: static offsets vector buffer.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000769 __ LoadAddress(
770 r8, ExternalReference::address_of_static_offsets_vector(isolate()));
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100771 // Argument 5 passed in r8 on Linux and on the stack on Windows.
772#ifdef _WIN64
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000773 __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kRegisterSize), r8);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100774#endif
775
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100776 // rdi: subject string
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100777 // rbx: previous index
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000778 // rcx: encoding of subject string (1 if one_byte 0 if two_byte);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100779 // r11: code
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000780 // r14: slice offset
781 // r15: original subject string
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100782
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100783 // Argument 2: Previous index.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000784 __ movp(arg_reg_2, rbx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100785
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000786 // Argument 4: End of string data
787 // Argument 3: Start of string data
788 Label setup_two_byte, setup_rest, got_length, length_not_from_slice;
789 // Prepare start and end index of the input.
790 // Load the length from the original sliced string if that is the case.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000791 __ addp(rbx, r14);
792 __ SmiToInteger32(arg_reg_3, FieldOperand(r15, String::kLengthOffset));
793 __ addp(r14, arg_reg_3); // Using arg3 as scratch.
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000794
795 // rbx: start index of the input
796 // r14: end index of the input
797 // r15: original subject string
798 __ testb(rcx, rcx); // Last use of rcx as encoding of subject string.
799 __ j(zero, &setup_two_byte, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000800 __ leap(arg_reg_4,
801 FieldOperand(rdi, r14, times_1, SeqOneByteString::kHeaderSize));
802 __ leap(arg_reg_3,
803 FieldOperand(rdi, rbx, times_1, SeqOneByteString::kHeaderSize));
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000804 __ jmp(&setup_rest, Label::kNear);
805 __ bind(&setup_two_byte);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000806 __ leap(arg_reg_4,
807 FieldOperand(rdi, r14, times_2, SeqTwoByteString::kHeaderSize));
808 __ leap(arg_reg_3,
809 FieldOperand(rdi, rbx, times_2, SeqTwoByteString::kHeaderSize));
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000810 __ bind(&setup_rest);
811
812 // Argument 1: Original subject string.
813 // The original subject is in the previous stack frame. Therefore we have to
814 // use rbp, which points exactly to one pointer size below the previous rsp.
815 // (Because creating a new stack frame pushes the previous rbp onto the stack
816 // and thereby moves up rsp by one kPointerSize.)
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000817 __ movp(arg_reg_1, r15);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100818
819 // Locate the code entry and call it.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000820 __ addp(r11, Immediate(Code::kHeaderSize - kHeapObjectTag));
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100821 __ call(r11);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100822
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000823 __ LeaveApiExitFrame(true);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100824
825 // Check the result.
Ben Murdoch257744e2011-11-30 15:57:28 +0000826 Label success;
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100827 Label exception;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000828 __ cmpl(rax, Immediate(1));
829 // We expect exactly one result since we force the called regexp to behave
830 // as non-global.
Ben Murdoch257744e2011-11-30 15:57:28 +0000831 __ j(equal, &success, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100832 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION));
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100833 __ j(equal, &exception);
834 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE));
835 // If none of the above, it can only be retry.
836 // Handle that in the runtime system.
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100837 __ j(not_equal, &runtime);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100838
839 // For failure return null.
840 __ LoadRoot(rax, Heap::kNullValueRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000841 __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100842
843 // Load RegExp data.
844 __ bind(&success);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000845 __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
846 __ movp(rcx, FieldOperand(rax, JSRegExp::kDataOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100847 __ SmiToInteger32(rax,
848 FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
849 // Calculate number of capture registers (number_of_captures + 1) * 2.
850 __ leal(rdx, Operand(rax, rax, times_1, 2));
851
852 // rdx: Number of capture registers
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000853 // Check that the fourth object is a JSArray object.
854 __ movp(r15, args.GetArgumentOperand(LAST_MATCH_INFO_ARGUMENT_INDEX));
855 __ JumpIfSmi(r15, &runtime);
856 __ CmpObjectType(r15, JS_ARRAY_TYPE, kScratchRegister);
857 __ j(not_equal, &runtime);
858 // Check that the JSArray is in fast case.
859 __ movp(rbx, FieldOperand(r15, JSArray::kElementsOffset));
860 __ movp(rax, FieldOperand(rbx, HeapObject::kMapOffset));
861 __ CompareRoot(rax, Heap::kFixedArrayMapRootIndex);
862 __ j(not_equal, &runtime);
863 // Check that the last match info has space for the capture registers and the
864 // additional information. Ensure no overflow in add.
865 STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset);
866 __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset));
867 __ subl(rax, Immediate(RegExpImpl::kLastMatchOverhead));
868 __ cmpl(rdx, rax);
869 __ j(greater, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100870
871 // rbx: last_match_info backing store (FixedArray)
872 // rdx: number of capture registers
873 // Store the capture count.
874 __ Integer32ToSmi(kScratchRegister, rdx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000875 __ movp(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset),
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100876 kScratchRegister);
877 // Store last subject and last input.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000878 __ movp(rax, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
879 __ movp(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax);
880 __ movp(rcx, rax);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100881 __ RecordWriteField(rbx,
882 RegExpImpl::kLastSubjectOffset,
883 rax,
884 rdi,
885 kDontSaveFPRegs);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000886 __ movp(rax, rcx);
887 __ movp(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100888 __ RecordWriteField(rbx,
889 RegExpImpl::kLastInputOffset,
890 rax,
891 rdi,
892 kDontSaveFPRegs);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100893
894 // Get the static offsets vector filled by the native regexp code.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000895 __ LoadAddress(
896 rcx, ExternalReference::address_of_static_offsets_vector(isolate()));
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100897
898 // rbx: last_match_info backing store (FixedArray)
899 // rcx: offsets vector
900 // rdx: number of capture registers
Ben Murdoch257744e2011-11-30 15:57:28 +0000901 Label next_capture, done;
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100902 // Capture register counter starts from number of capture registers and
903 // counts down until wraping after zero.
904 __ bind(&next_capture);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000905 __ subp(rdx, Immediate(1));
Ben Murdoch257744e2011-11-30 15:57:28 +0000906 __ j(negative, &done, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100907 // Read the value from the static offsets vector buffer and make it a smi.
908 __ movl(rdi, Operand(rcx, rdx, times_int_size, 0));
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100909 __ Integer32ToSmi(rdi, rdi);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100910 // Store the smi value in the last match info.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000911 __ movp(FieldOperand(rbx,
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100912 rdx,
913 times_pointer_size,
914 RegExpImpl::kFirstCaptureOffset),
915 rdi);
916 __ jmp(&next_capture);
917 __ bind(&done);
918
919 // Return last match info.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000920 __ movp(rax, r15);
921 __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100922
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100923 __ bind(&exception);
924 // Result must now be exception. If there is no pending exception already a
925 // stack overflow (on the backtrack stack) was detected in RegExp code but
926 // haven't created the exception yet. Handle that in the runtime system.
927 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
Steve Block44f0eee2011-05-26 01:26:41 +0100928 ExternalReference pending_exception_address(
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000929 Isolate::kPendingExceptionAddress, isolate());
Steve Block44f0eee2011-05-26 01:26:41 +0100930 Operand pending_exception_operand =
931 masm->ExternalOperand(pending_exception_address, rbx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000932 __ movp(rax, pending_exception_operand);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100933 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000934 __ cmpp(rax, rdx);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100935 __ j(equal, &runtime);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100936
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000937 // For exception, throw the exception again.
938 __ TailCallRuntime(Runtime::kRegExpExecReThrow);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100939
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000940 // Do the runtime call to execute the regexp.
941 __ bind(&runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000942 __ TailCallRuntime(Runtime::kRegExpExec);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000943
944 // Deferred code for string handling.
Ben Murdoch097c5b22016-05-18 11:27:45 +0100945 // (6) Long external string? If not, go to (10).
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000946 __ bind(&not_seq_nor_cons);
947 // Compare flags are still set from (3).
948 __ j(greater, &not_long_external, Label::kNear); // Go to (10).
949
Ben Murdoch097c5b22016-05-18 11:27:45 +0100950 // (7) External string. Short external strings have been ruled out.
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100951 __ bind(&external_string);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000952 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100953 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
954 if (FLAG_debug_code) {
955 // Assert that we do not have a cons or slice (indirect strings) here.
956 // Sequential strings have already been ruled out.
957 __ testb(rbx, Immediate(kIsIndirectStringMask));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000958 __ Assert(zero, kExternalStringExpectedButNotFound);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100959 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000960 __ movp(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100961 // Move the pointer so that offset-wise, it looks like a sequential string.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000962 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
963 __ subp(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100964 STATIC_ASSERT(kTwoByteStringTag == 0);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100965 // (8) Is the external string one byte? If yes, go to (5).
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100966 __ testb(rbx, Immediate(kStringEncodingMask));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100967 __ j(not_zero, &seq_one_byte_string); // Go to (5).
Ben Murdoch592a9fc2012-03-05 11:04:45 +0000968
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000969 // rdi: subject string (flat two-byte)
970 // rax: RegExp data (FixedArray)
Ben Murdoch097c5b22016-05-18 11:27:45 +0100971 // (9) Two byte sequential. Load regexp code for two byte. Go to (E).
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000972 __ bind(&seq_two_byte_string);
973 __ movp(r11, FieldOperand(rax, JSRegExp::kDataUC16CodeOffset));
974 __ Set(rcx, 0); // Type is two byte.
975 __ jmp(&check_code); // Go to (E).
976
977 // (10) Not a string or a short external string? If yes, bail out to runtime.
978 __ bind(&not_long_external);
979 // Catch non-string subject or short external string.
980 STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag !=0);
981 __ testb(rbx, Immediate(kIsNotStringMask | kShortExternalStringMask));
982 __ j(not_zero, &runtime);
983
Ben Murdoch097c5b22016-05-18 11:27:45 +0100984 // (11) Sliced string. Replace subject with parent. Go to (1).
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000985 // Load offset into r14 and replace subject string with parent.
986 __ SmiToInteger32(r14, FieldOperand(rdi, SlicedString::kOffsetOffset));
987 __ movp(rdi, FieldOperand(rdi, SlicedString::kParentOffset));
988 __ jmp(&check_underlying);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100989#endif // V8_INTERPRETED_REGEXP
990}
991
992
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100993static int NegativeComparisonResult(Condition cc) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000994 DCHECK(cc != equal);
995 DCHECK((cc == less) || (cc == less_equal)
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100996 || (cc == greater) || (cc == greater_equal));
997 return (cc == greater || cc == greater_equal) ? LESS : GREATER;
998}
999
1000
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001001static void CheckInputType(MacroAssembler* masm, Register input,
1002 CompareICState::State expected, Label* fail) {
1003 Label ok;
1004 if (expected == CompareICState::SMI) {
1005 __ JumpIfNotSmi(input, fail);
1006 } else if (expected == CompareICState::NUMBER) {
1007 __ JumpIfSmi(input, &ok);
1008 __ CompareMap(input, masm->isolate()->factory()->heap_number_map());
1009 __ j(not_equal, fail);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001010 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001011 // We could be strict about internalized/non-internalized here, but as long as
1012 // hydrogen doesn't care, the stub doesn't have to care either.
1013 __ bind(&ok);
1014}
1015
1016
// Emits a branch to |label| unless |object| is an internalized string.
// Smis and any object whose instance type has the not-string or
// not-internalized bit set take the branch.  Clobbers |scratch| (it ends up
// holding the instance type byte of |object|'s map).
static void BranchIfNotInternalizedString(MacroAssembler* masm,
                                          Label* label,
                                          Register object,
                                          Register scratch) {
  __ JumpIfSmi(object, label);
  // Load the instance type byte from the object's map.
  __ movp(scratch, FieldOperand(object, HeapObject::kMapOffset));
  __ movzxbp(scratch,
             FieldOperand(scratch, Map::kInstanceTypeOffset));
  // Internalized strings have both tag bits clear, so a single test of the
  // combined mask suffices.
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ testb(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, label);
}
1029
1030
1031void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001032 Label runtime_call, check_unequal_objects, done;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001033 Condition cc = GetCondition();
1034 Factory* factory = isolate()->factory();
1035
1036 Label miss;
1037 CheckInputType(masm, rdx, left(), &miss);
1038 CheckInputType(masm, rax, right(), &miss);
1039
1040 // Compare two smis.
1041 Label non_smi, smi_done;
1042 __ JumpIfNotBothSmi(rax, rdx, &non_smi);
1043 __ subp(rdx, rax);
1044 __ j(no_overflow, &smi_done);
1045 __ notp(rdx); // Correct sign in case of overflow. rdx cannot be 0 here.
1046 __ bind(&smi_done);
1047 __ movp(rax, rdx);
1048 __ ret(0);
1049 __ bind(&non_smi);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001050
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001051 // The compare stub returns a positive, negative, or zero 64-bit integer
1052 // value in rax, corresponding to result of comparing the two inputs.
1053 // NOTICE! This code is only reached after a smi-fast-case check, so
1054 // it is certain that at least one operand isn't a smi.
1055
1056 // Two identical objects are equal unless they are both NaN or undefined.
1057 {
Ben Murdoch257744e2011-11-30 15:57:28 +00001058 Label not_identical;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001059 __ cmpp(rax, rdx);
Ben Murdoch257744e2011-11-30 15:57:28 +00001060 __ j(not_equal, &not_identical, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001061
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001062 if (cc != equal) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001063 // Check for undefined. undefined OP undefined is false even though
1064 // undefined == undefined.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001065 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001066 Label check_for_nan;
1067 __ j(not_equal, &check_for_nan, Label::kNear);
1068 __ Set(rax, NegativeComparisonResult(cc));
1069 __ ret(0);
1070 __ bind(&check_for_nan);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001071 }
1072
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001073 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001074 // so we do the second best thing - test it ourselves.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001075 Label heap_number;
1076 // If it's not a heap number, then return equal for (in)equality operator.
1077 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
1078 factory->heap_number_map());
1079 __ j(equal, &heap_number, Label::kNear);
1080 if (cc != equal) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001081 __ movp(rcx, FieldOperand(rax, HeapObject::kMapOffset));
1082 __ movzxbl(rcx, FieldOperand(rcx, Map::kInstanceTypeOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001083 // Call runtime on identical objects. Otherwise return equal.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001084 __ cmpb(rcx, Immediate(static_cast<uint8_t>(FIRST_JS_RECEIVER_TYPE)));
1085 __ j(above_equal, &runtime_call, Label::kFar);
1086 // Call runtime on identical symbols since we need to throw a TypeError.
1087 __ cmpb(rcx, Immediate(static_cast<uint8_t>(SYMBOL_TYPE)));
1088 __ j(equal, &runtime_call, Label::kFar);
1089 // Call runtime on identical SIMD values since we must throw a TypeError.
1090 __ cmpb(rcx, Immediate(static_cast<uint8_t>(SIMD128_VALUE_TYPE)));
1091 __ j(equal, &runtime_call, Label::kFar);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001092 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001093 __ Set(rax, EQUAL);
1094 __ ret(0);
1095
1096 __ bind(&heap_number);
1097 // It is a heap number, so return equal if it's not NaN.
1098 // For NaN, return 1 for every condition except greater and
1099 // greater-equal. Return -1 for them, so the comparison yields
1100 // false for all conditions except not-equal.
1101 __ Set(rax, EQUAL);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001102 __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
1103 __ Ucomisd(xmm0, xmm0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001104 __ setcc(parity_even, rax);
1105 // rax is 0 for equal non-NaN heapnumbers, 1 for NaNs.
1106 if (cc == greater_equal || cc == greater) {
1107 __ negp(rax);
1108 }
1109 __ ret(0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001110
1111 __ bind(&not_identical);
1112 }
1113
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001114 if (cc == equal) { // Both strict and non-strict.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001115 Label slow; // Fallthrough label.
1116
1117 // If we're doing a strict equality comparison, we don't have to do
1118 // type conversion, so we generate code to do fast comparison for objects
1119 // and oddballs. Non-smi numbers and strings still go through the usual
1120 // slow-case code.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001121 if (strict()) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001122 // If either is a Smi (we know that not both are), then they can only
1123 // be equal if the other is a HeapNumber. If so, use the slow case.
1124 {
1125 Label not_smis;
1126 __ SelectNonSmi(rbx, rax, rdx, &not_smis);
1127
1128 // Check if the non-smi operand is a heap number.
1129 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
Ben Murdoch257744e2011-11-30 15:57:28 +00001130 factory->heap_number_map());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001131 // If heap number, handle it in the slow case.
1132 __ j(equal, &slow);
1133 // Return non-equal. ebx (the lower half of rbx) is not zero.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001134 __ movp(rax, rbx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001135 __ ret(0);
1136
1137 __ bind(&not_smis);
1138 }
1139
1140 // If either operand is a JSObject or an oddball value, then they are not
1141 // equal since their pointers are different
1142 // There is no test for undetectability in strict equality.
1143
1144 // If the first object is a JS object, we have done pointer comparison.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001145 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
Ben Murdoch257744e2011-11-30 15:57:28 +00001146 Label first_non_object;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001147 __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
Ben Murdoch257744e2011-11-30 15:57:28 +00001148 __ j(below, &first_non_object, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001149 // Return non-zero (rax (not rax) is not zero)
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001150 Label return_not_equal;
1151 STATIC_ASSERT(kHeapObjectTag != 0);
1152 __ bind(&return_not_equal);
1153 __ ret(0);
1154
1155 __ bind(&first_non_object);
1156 // Check for oddballs: true, false, null, undefined.
1157 __ CmpInstanceType(rcx, ODDBALL_TYPE);
1158 __ j(equal, &return_not_equal);
1159
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001160 __ CmpObjectType(rdx, FIRST_JS_RECEIVER_TYPE, rcx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001161 __ j(above_equal, &return_not_equal);
1162
1163 // Check for oddballs: true, false, null, undefined.
1164 __ CmpInstanceType(rcx, ODDBALL_TYPE);
1165 __ j(equal, &return_not_equal);
1166
1167 // Fall through to the general case.
1168 }
1169 __ bind(&slow);
1170 }
1171
1172 // Generate the number comparison code.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001173 Label non_number_comparison;
1174 Label unordered;
1175 FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison);
1176 __ xorl(rax, rax);
1177 __ xorl(rcx, rcx);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001178 __ Ucomisd(xmm0, xmm1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001179
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001180 // Don't base result on EFLAGS when a NaN is involved.
1181 __ j(parity_even, &unordered, Label::kNear);
1182 // Return a result of -1, 0, or 1, based on EFLAGS.
1183 __ setcc(above, rax);
1184 __ setcc(below, rcx);
1185 __ subp(rax, rcx);
1186 __ ret(0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001187
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001188 // If one of the numbers was NaN, then the result is always false.
1189 // The cc is never not-equal.
1190 __ bind(&unordered);
1191 DCHECK(cc != not_equal);
1192 if (cc == less || cc == less_equal) {
1193 __ Set(rax, 1);
1194 } else {
1195 __ Set(rax, -1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001196 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001197 __ ret(0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001198
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001199 // The number comparison code did not provide a valid result.
1200 __ bind(&non_number_comparison);
1201
1202 // Fast negative check for internalized-to-internalized equality.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001203 Label check_for_strings;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001204 if (cc == equal) {
1205 BranchIfNotInternalizedString(
1206 masm, &check_for_strings, rax, kScratchRegister);
1207 BranchIfNotInternalizedString(
1208 masm, &check_for_strings, rdx, kScratchRegister);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001209
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001210 // We've already checked for object identity, so if both operands are
1211 // internalized strings they aren't equal. Register rax (not rax) already
1212 // holds a non-zero value, which indicates not equal, so just return.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001213 __ ret(0);
1214 }
1215
1216 __ bind(&check_for_strings);
1217
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001218 __ JumpIfNotBothSequentialOneByteStrings(rdx, rax, rcx, rbx,
1219 &check_unequal_objects);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001220
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001221 // Inline comparison of one-byte strings.
1222 if (cc == equal) {
1223 StringHelper::GenerateFlatOneByteStringEquals(masm, rdx, rax, rcx, rbx);
Ben Murdoch257744e2011-11-30 15:57:28 +00001224 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001225 StringHelper::GenerateCompareFlatOneByteStrings(masm, rdx, rax, rcx, rbx,
1226 rdi, r8);
Ben Murdoch257744e2011-11-30 15:57:28 +00001227 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001228
1229#ifdef DEBUG
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001230 __ Abort(kUnexpectedFallThroughFromStringComparison);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001231#endif
1232
1233 __ bind(&check_unequal_objects);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001234 if (cc == equal && !strict()) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001235 // Not strict equality. Objects are unequal if
1236 // they are both JSObjects and not undetectable,
1237 // and their pointers are different.
Ben Murdochda12d292016-06-02 14:46:10 +01001238 Label return_equal, return_unequal, undetectable;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001239 // At most one is a smi, so we can test for smi by adding the two.
1240 // A smi plus a heap object has the low bit set, a heap object plus
1241 // a heap object has the low bit clear.
1242 STATIC_ASSERT(kSmiTag == 0);
1243 STATIC_ASSERT(kSmiTagMask == 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001244 __ leap(rcx, Operand(rax, rdx, times_1, 0));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001245 __ testb(rcx, Immediate(kSmiTagMask));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001246 __ j(not_zero, &runtime_call, Label::kNear);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001247
1248 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
1249 __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001250 __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
1251 Immediate(1 << Map::kIsUndetectable));
Ben Murdochda12d292016-06-02 14:46:10 +01001252 __ j(not_zero, &undetectable, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001253 __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
1254 Immediate(1 << Map::kIsUndetectable));
Ben Murdochda12d292016-06-02 14:46:10 +01001255 __ j(not_zero, &return_unequal, Label::kNear);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001256
1257 __ CmpInstanceType(rbx, FIRST_JS_RECEIVER_TYPE);
1258 __ j(below, &runtime_call, Label::kNear);
1259 __ CmpInstanceType(rcx, FIRST_JS_RECEIVER_TYPE);
1260 __ j(below, &runtime_call, Label::kNear);
1261
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001262 __ bind(&return_unequal);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001263 // Return non-equal by returning the non-zero object pointer in rax.
1264 __ ret(0);
1265
1266 __ bind(&undetectable);
1267 __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
1268 Immediate(1 << Map::kIsUndetectable));
Ben Murdochda12d292016-06-02 14:46:10 +01001269 __ j(zero, &return_unequal, Label::kNear);
1270
1271 // If both sides are JSReceivers, then the result is false according to
1272 // the HTML specification, which says that only comparisons with null or
1273 // undefined are affected by special casing for document.all.
1274 __ CmpInstanceType(rbx, ODDBALL_TYPE);
1275 __ j(zero, &return_equal, Label::kNear);
1276 __ CmpInstanceType(rcx, ODDBALL_TYPE);
1277 __ j(not_zero, &return_unequal, Label::kNear);
1278
1279 __ bind(&return_equal);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001280 __ Set(rax, EQUAL);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001281 __ ret(0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001282 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001283 __ bind(&runtime_call);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001284
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001285 if (cc == equal) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001286 {
1287 FrameScope scope(masm, StackFrame::INTERNAL);
1288 __ Push(rdx);
1289 __ Push(rax);
1290 __ CallRuntime(strict() ? Runtime::kStrictEqual : Runtime::kEqual);
1291 }
1292 // Turn true into 0 and false into some non-zero value.
1293 STATIC_ASSERT(EQUAL == 0);
1294 __ LoadRoot(rdx, Heap::kTrueValueRootIndex);
1295 __ subp(rax, rdx);
1296 __ Ret();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001297 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001298 // Push arguments below the return address to prepare jump to builtin.
1299 __ PopReturnAddressTo(rcx);
1300 __ Push(rdx);
1301 __ Push(rax);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001302 __ Push(Smi::FromInt(NegativeComparisonResult(cc)));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001303 __ PushReturnAddressFrom(rcx);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001304 __ TailCallRuntime(Runtime::kCompare);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001305 }
1306
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001307 __ bind(&miss);
1308 GenerateMiss(masm);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001309}
1310
1311
// Calls |stub| from within GenerateRecordCallTarget while preserving the
// four registers that the record-call-target protocol uses. The argument
// count and slot index are re-tagged as Smis around the call because the
// callee may trigger GC, and restored to untagged form afterwards.
static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
  // rax : number of arguments to the construct function
  // rbx : feedback vector
  // rdx : slot in feedback vector (Smi)
  // rdi : the function to call
  FrameScope scope(masm, StackFrame::INTERNAL);

  // Number-of-arguments register must be smi-tagged to call out.
  __ Integer32ToSmi(rax, rax);
  __ Push(rax);
  __ Push(rdi);
  __ Integer32ToSmi(rdx, rdx);
  __ Push(rdx);
  __ Push(rbx);

  __ CallStub(stub);

  // Restore the saved registers in reverse push order, then untag the
  // argument count back to a raw integer.
  __ Pop(rbx);
  __ Pop(rdx);
  __ Pop(rdi);
  __ Pop(rax);
  __ SmiToInteger32(rax, rax);
}
1335
1336
static void GenerateRecordCallTarget(MacroAssembler* masm) {
  // Cache the called function in a feedback vector slot. Cache states
  // are uninitialized, monomorphic (indicated by a JSFunction), and
  // megamorphic.
  // rax : number of arguments to the construct function
  // rbx : feedback vector
  // rdx : slot in feedback vector (Smi)
  // rdi : the function to call
  Isolate* isolate = masm->isolate();
  Label initialize, done, miss, megamorphic, not_array_function,
      done_no_smi_convert;

  // Load the cache state into r11. The slot index arrives as a Smi and is
  // untagged here; it is re-tagged at the &done exit below.
  __ SmiToInteger32(rdx, rdx);
  __ movp(r11,
          FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));

  // A monomorphic cache hit or an already megamorphic state: invoke the
  // function without changing the state.
  // We don't know if r11 is a WeakCell or a Symbol, but it's harmless to read
  // at this position in a symbol (see static asserts in
  // type-feedback-vector.h).
  Label check_allocation_site;
  __ cmpp(rdi, FieldOperand(r11, WeakCell::kValueOffset));
  __ j(equal, &done, Label::kFar);
  __ CompareRoot(r11, Heap::kmegamorphic_symbolRootIndex);
  __ j(equal, &done, Label::kFar);
  __ CompareRoot(FieldOperand(r11, HeapObject::kMapOffset),
                 Heap::kWeakCellMapRootIndex);
  __ j(not_equal, &check_allocation_site);

  // If the weak cell is cleared, we have a new chance to become monomorphic.
  // (A cleared cell holds Smi zero in its value slot.)
  __ CheckSmi(FieldOperand(r11, WeakCell::kValueOffset));
  __ j(equal, &initialize);
  __ jmp(&megamorphic);

  __ bind(&check_allocation_site);
  // If we came here, we need to see if we are the array function.
  // If we didn't have a matching function, and we didn't find the megamorph
  // sentinel, then we have in the slot either some other function or an
  // AllocationSite.
  __ CompareRoot(FieldOperand(r11, 0), Heap::kAllocationSiteMapRootIndex);
  __ j(not_equal, &miss);

  // Make sure the function is the Array() function
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r11);
  __ cmpp(rdi, r11);
  __ j(not_equal, &megamorphic);
  __ jmp(&done);

  __ bind(&miss);

  // A monomorphic miss (i.e, here the cache is not uninitialized) goes
  // megamorphic.
  __ CompareRoot(r11, Heap::kuninitialized_symbolRootIndex);
  __ j(equal, &initialize);
  // MegamorphicSentinel is an immortal immovable object (undefined) so no
  // write-barrier is needed.
  __ bind(&megamorphic);
  __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
          TypeFeedbackVector::MegamorphicSentinel(isolate));
  __ jmp(&done);

  // An uninitialized cache is patched with the function or sentinel to
  // indicate the ElementsKind if function is the Array constructor.
  __ bind(&initialize);

  // Make sure the function is the Array() function
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r11);
  __ cmpp(rdi, r11);
  __ j(not_equal, &not_array_function);

  // Array constructor: record an AllocationSite in the slot. The stub call
  // leaves rdx untagged, hence the separate &done_no_smi_convert exit.
  CreateAllocationSiteStub create_stub(isolate);
  CallStubInRecordCallTarget(masm, &create_stub);
  __ jmp(&done_no_smi_convert);

  // Any other function: record a WeakCell holding it (monomorphic state).
  __ bind(&not_array_function);
  CreateWeakCellStub weak_cell_stub(isolate);
  CallStubInRecordCallTarget(masm, &weak_cell_stub);
  __ jmp(&done_no_smi_convert);

  // Re-tag the slot index so callers see rdx as a Smi again.
  __ bind(&done);
  __ Integer32ToSmi(rdx, rdx);

  __ bind(&done_no_smi_convert);
}
1423
1424
// Entry point for `new f(...)` call sites using a feedback-vector slot.
// Records the call target, loads any AllocationSite feedback, and tail-calls
// the function-specific construct stub (or the generic Construct builtin for
// non-JSFunction callees).
void CallConstructStub::Generate(MacroAssembler* masm) {
  // rax : number of arguments
  // rbx : feedback vector
  // rdx : slot in feedback vector (Smi)
  // rdi : constructor function

  Label non_function;
  // Check that the constructor is not a smi.
  __ JumpIfSmi(rdi, &non_function);
  // Check that constructor is a JSFunction.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, r11);
  __ j(not_equal, &non_function);

  GenerateRecordCallTarget(masm);

  // GenerateRecordCallTarget leaves rdx as a Smi; untag to index the vector.
  __ SmiToInteger32(rdx, rdx);
  Label feedback_register_initialized;
  // Put the AllocationSite from the feedback vector into rbx, or undefined.
  __ movp(rbx,
          FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));
  __ CompareRoot(FieldOperand(rbx, 0), Heap::kAllocationSiteMapRootIndex);
  __ j(equal, &feedback_register_initialized, Label::kNear);
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ bind(&feedback_register_initialized);

  __ AssertUndefinedOrAllocationSite(rbx);

  // Pass new target to construct stub.
  __ movp(rdx, rdi);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset));
  __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
  __ jmp(rcx);

  // Smi or non-JSFunction callee: defer to the generic Construct builtin,
  // passing the callee as new target as well.
  __ bind(&non_function);
  __ movp(rdx, rdi);
  __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
1466
1467
// Handles a call IC slot that holds AllocationSite feedback: verifies the
// callee really is the Array() function (jumping to |miss| otherwise), bumps
// the slot's call counter, and tail-calls the ArrayConstructorStub with the
// AllocationSite as feedback.
void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
  // rdi - function
  // rdx - slot id
  // rbx - vector
  // rcx - allocation site (loaded from vector[slot]).
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r8);
  __ cmpp(rdi, r8);
  __ j(not_equal, miss);

  __ movp(rax, Immediate(arg_count()));

  // Increment the call count for monomorphic function calls. The counter
  // lives one pointer past the feedback slot itself.
  __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size,
                                 FixedArray::kHeaderSize + kPointerSize),
                    Smi::FromInt(CallICNexus::kCallCountIncrement));

  // ArrayConstructorStub expects the AllocationSite in rbx and the new
  // target in rdx.
  __ movp(rbx, rcx);
  __ movp(rdx, rdi);
  ArrayConstructorStub stub(masm->isolate(), arg_count());
  __ TailCallStub(&stub);
}
1489
1490
// Main call-IC dispatcher. Examines the feedback vector slot and routes the
// call: monomorphic hit -> CallFunction builtin; AllocationSite -> array
// constructor path; megamorphic -> generic Call builtin; uninitialized ->
// record the callee in a new WeakCell, then call. Unhandled states fall
// through to GenerateMiss.
void CallICStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rdi - function
  // -- rdx - slot id
  // -- rbx - vector
  // -----------------------------------
  Isolate* isolate = masm->isolate();
  Label extra_checks_or_miss, call, call_function;
  int argc = arg_count();
  StackArgumentsAccessor args(rsp, argc);
  ParameterCount actual(argc);

  // The checks. First, does rdi match the recorded monomorphic target?
  __ SmiToInteger32(rdx, rdx);
  __ movp(rcx,
          FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));

  // We don't know that we have a weak cell. We might have a private symbol
  // or an AllocationSite, but the memory is safe to examine.
  // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
  // FixedArray.
  // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
  // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
  // computed, meaning that it can't appear to be a pointer. If the low bit is
  // 0, then hash is computed, but the 0 bit prevents the field from appearing
  // to be a pointer.
  STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
  STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
                    WeakCell::kValueOffset &&
                WeakCell::kValueOffset == Symbol::kHashFieldSlot);

  __ cmpp(rdi, FieldOperand(rcx, WeakCell::kValueOffset));
  __ j(not_equal, &extra_checks_or_miss);

  // The compare above could have been a SMI/SMI comparison. Guard against this
  // convincing us that we have a monomorphic JSFunction.
  __ JumpIfSmi(rdi, &extra_checks_or_miss);

  // Increment the call count for monomorphic function calls.
  __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size,
                                 FixedArray::kHeaderSize + kPointerSize),
                    Smi::FromInt(CallICNexus::kCallCountIncrement));

  // Monomorphic fast path: tail-call the CallFunction builtin.
  __ bind(&call_function);
  __ Set(rax, argc);
  __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
                                                    tail_call_mode()),
          RelocInfo::CODE_TARGET);

  __ bind(&extra_checks_or_miss);
  Label uninitialized, miss, not_allocation_site;

  // Already megamorphic: nothing to record, just call generically.
  __ Cmp(rcx, TypeFeedbackVector::MegamorphicSentinel(isolate));
  __ j(equal, &call);

  // Check if we have an allocation site.
  __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
                 Heap::kAllocationSiteMapRootIndex);
  __ j(not_equal, &not_allocation_site);

  // We have an allocation site.
  HandleArrayCase(masm, &miss);

  __ bind(&not_allocation_site);

  // The following cases attempt to handle MISS cases without going to the
  // runtime.
  if (FLAG_trace_ic) {
    __ jmp(&miss);
  }

  __ Cmp(rcx, TypeFeedbackVector::UninitializedSentinel(isolate));
  __ j(equal, &uninitialized);

  // We are going megamorphic. If the feedback is a JSFunction, it is fine
  // to handle it here. More complex cases are dealt with in the runtime.
  __ AssertNotSmi(rcx);
  __ CmpObjectType(rcx, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &miss);
  __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
          TypeFeedbackVector::MegamorphicSentinel(isolate));

  // Generic (megamorphic) path: tail-call the Call builtin.
  __ bind(&call);
  __ Set(rax, argc);
  __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
          RelocInfo::CODE_TARGET);

  __ bind(&uninitialized);

  // We are going monomorphic, provided we actually have a JSFunction.
  __ JumpIfSmi(rdi, &miss);

  // Goto miss case if we do not have a function.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &miss);

  // Make sure the function is not the Array() function, which requires special
  // behavior on MISS.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, rcx);
  __ cmpp(rdi, rcx);
  __ j(equal, &miss);

  // Make sure the function belongs to the same native context.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kContextOffset));
  __ movp(rcx, ContextOperand(rcx, Context::NATIVE_CONTEXT_INDEX));
  __ cmpp(rcx, NativeContextOperand());
  __ j(not_equal, &miss);

  // Initialize the call counter.
  __ Move(FieldOperand(rbx, rdx, times_pointer_size,
                       FixedArray::kHeaderSize + kPointerSize),
          Smi::FromInt(CallICNexus::kCallCountIncrement));

  // Store the function. Use a stub since we need a frame for allocation.
  // rbx - vector
  // rdx - slot (needs to be in smi form)
  // rdi - function
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    CreateWeakCellStub create_stub(isolate);

    __ Integer32ToSmi(rdx, rdx);
    __ Push(rdi);
    __ CallStub(&create_stub);
    __ Pop(rdi);
  }

  __ jmp(&call_function);

  // We are here because tracing is on or we encountered a MISS case we can't
  // handle here.
  __ bind(&miss);
  GenerateMiss(masm);

  // After the miss handler returns (callee now in rdi), call generically.
  __ jmp(&call);

  // Unreachable
  __ int3();
}
1630
1631
// Slow path for a call-IC miss: calls Runtime::kCallIC_Miss with the callee,
// feedback vector, and (Smi-tagged) slot, and leaves the runtime's resolved
// callee in rdi for the caller to invoke.
void CallICStub::GenerateMiss(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);

  // Push the function and the feedback info (vector and Smi-tagged slot) as
  // the runtime call's arguments.
  __ Push(rdi);
  __ Push(rbx);
  __ Integer32ToSmi(rdx, rdx);
  __ Push(rdx);

  // Call the entry.
  __ CallRuntime(Runtime::kCallIC_Miss);

  // Move result to rdi and exit the internal frame.
  __ movp(rdi, rax);
}
1647
1648
// On x64 the CEntryStub does not need to be allocated in immovable code
// space, so always report false.
bool CEntryStub::NeedsImmovableCode() {
  return false;
}
1652
1653
// Pre-generates the set of stubs that must exist before general code
// generation can rely on them (ahead-of-time, per isolate).
void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  CEntryStub::GenerateAheadOfTime(isolate);
  StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
  StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
  // It is important that the store buffer overflow stubs are generated first.
  ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
  CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
  CreateWeakCellStub::GenerateAheadOfTime(isolate);
  BinaryOpICStub::GenerateAheadOfTime(isolate);
  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
  StoreFastElementStub::GenerateAheadOfTime(isolate);
  TypeofStub::GenerateAheadOfTime(isolate);
}
1667
1668
// Intentionally empty on x64: no floating-point stubs need to be
// pre-generated for this architecture.
void CodeStub::GenerateFPStubs(Isolate* isolate) {
}
1671
1672
// Pre-generates both CEntryStub variants (with and without double-register
// saving) so they are available before any code needs to call into C++.
void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
  CEntryStub stub(isolate, 1, kDontSaveFPRegs);
  stub.GetCode();
  CEntryStub save_doubles(isolate, 1, kSaveFPRegs);
  save_doubles.GetCode();
}
1679
1680
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001681void CEntryStub::Generate(MacroAssembler* masm) {
1682 // rax: number of arguments including receiver
1683 // rbx: pointer to C function (C callee-saved)
1684 // rbp: frame pointer of calling JS frame (restored after C call)
1685 // rsp: stack pointer (restored after C call)
1686 // rsi: current context (restored)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001687 //
1688 // If argv_in_register():
1689 // r15: pointer to the first argument
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001690
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001691 ProfileEntryHookStub::MaybeCallEntryHook(masm);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001692
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001693#ifdef _WIN64
Ben Murdoch097c5b22016-05-18 11:27:45 +01001694 // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9. It requires the
1695 // stack to be aligned to 16 bytes. It only allows a single-word to be
1696 // returned in register rax. Larger return sizes must be written to an address
1697 // passed as a hidden first argument.
1698 const Register kCCallArg0 = rcx;
1699 const Register kCCallArg1 = rdx;
1700 const Register kCCallArg2 = r8;
1701 const Register kCCallArg3 = r9;
1702 const int kArgExtraStackSpace = 2;
1703 const int kMaxRegisterResultSize = 1;
1704#else
1705 // GCC / Clang passes arguments in rdi, rsi, rdx, rcx, r8, r9. Simple results
1706 // are returned in rax, and a struct of two pointers are returned in rax+rdx.
1707 // Larger return sizes must be written to an address passed as a hidden first
1708 // argument.
1709 const Register kCCallArg0 = rdi;
1710 const Register kCCallArg1 = rsi;
1711 const Register kCCallArg2 = rdx;
1712 const Register kCCallArg3 = rcx;
1713 const int kArgExtraStackSpace = 0;
1714 const int kMaxRegisterResultSize = 2;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001715#endif // _WIN64
Ben Murdoch097c5b22016-05-18 11:27:45 +01001716
1717 // Enter the exit frame that transitions from JavaScript to C++.
1718 int arg_stack_space =
1719 kArgExtraStackSpace +
1720 (result_size() <= kMaxRegisterResultSize ? 0 : result_size());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001721 if (argv_in_register()) {
1722 DCHECK(!save_doubles());
1723 __ EnterApiExitFrame(arg_stack_space);
1724 // Move argc into r14 (argv is already in r15).
1725 __ movp(r14, rax);
1726 } else {
1727 __ EnterExitFrame(arg_stack_space, save_doubles());
1728 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001729
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001730 // rbx: pointer to builtin function (C callee-saved).
1731 // rbp: frame pointer of exit frame (restored after C call).
1732 // rsp: stack pointer (restored after C call).
1733 // r14: number of arguments including receiver (C callee-saved).
Steve Block44f0eee2011-05-26 01:26:41 +01001734 // r15: argv pointer (C callee-saved).
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001735
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001736 // Check stack alignment.
1737 if (FLAG_debug_code) {
1738 __ CheckStackAlignment();
1739 }
1740
Ben Murdoch097c5b22016-05-18 11:27:45 +01001741 // Call C function. The arguments object will be created by stubs declared by
1742 // DECLARE_RUNTIME_FUNCTION().
1743 if (result_size() <= kMaxRegisterResultSize) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001744 // Pass a pointer to the Arguments object as the first argument.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001745 // Return result in single register (rax), or a register pair (rax, rdx).
1746 __ movp(kCCallArg0, r14); // argc.
1747 __ movp(kCCallArg1, r15); // argv.
1748 __ Move(kCCallArg2, ExternalReference::isolate_address(isolate()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001749 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001750 DCHECK_LE(result_size(), 3);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001751 // Pass a pointer to the result location as the first argument.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001752 __ leap(kCCallArg0, StackSpaceOperand(kArgExtraStackSpace));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001753 // Pass a pointer to the Arguments object as the second argument.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001754 __ movp(kCCallArg1, r14); // argc.
1755 __ movp(kCCallArg2, r15); // argv.
1756 __ Move(kCCallArg3, ExternalReference::isolate_address(isolate()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001757 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001758 __ call(rbx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001759
Ben Murdoch097c5b22016-05-18 11:27:45 +01001760 if (result_size() > kMaxRegisterResultSize) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001761 // Read result values stored on stack. Result is stored
Ben Murdoch097c5b22016-05-18 11:27:45 +01001762 // above the two Arguments object slots on Win64.
1763 DCHECK_LE(result_size(), 3);
1764 __ movq(kReturnRegister0, StackSpaceOperand(kArgExtraStackSpace + 0));
1765 __ movq(kReturnRegister1, StackSpaceOperand(kArgExtraStackSpace + 1));
1766 if (result_size() > 2) {
1767 __ movq(kReturnRegister2, StackSpaceOperand(kArgExtraStackSpace + 2));
1768 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001769 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01001770 // Result is in rax, rdx:rax or r8:rdx:rax - do not destroy these registers!
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001771
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001772 // Check result for exception sentinel.
1773 Label exception_returned;
1774 __ CompareRoot(rax, Heap::kExceptionRootIndex);
1775 __ j(equal, &exception_returned);
1776
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001777 // Check that there is no pending exception, otherwise we
1778 // should have returned the exception sentinel.
1779 if (FLAG_debug_code) {
1780 Label okay;
1781 __ LoadRoot(r14, Heap::kTheHoleValueRootIndex);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001782 ExternalReference pending_exception_address(
1783 Isolate::kPendingExceptionAddress, isolate());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001784 Operand pending_exception_operand =
1785 masm->ExternalOperand(pending_exception_address);
1786 __ cmpp(r14, pending_exception_operand);
1787 __ j(equal, &okay, Label::kNear);
1788 __ int3();
1789 __ bind(&okay);
1790 }
1791
1792 // Exit the JavaScript to C++ exit frame.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001793 __ LeaveExitFrame(save_doubles(), !argv_in_register());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001794 __ ret(0);
1795
1796 // Handling of exception.
1797 __ bind(&exception_returned);
1798
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001799 ExternalReference pending_handler_context_address(
1800 Isolate::kPendingHandlerContextAddress, isolate());
1801 ExternalReference pending_handler_code_address(
1802 Isolate::kPendingHandlerCodeAddress, isolate());
1803 ExternalReference pending_handler_offset_address(
1804 Isolate::kPendingHandlerOffsetAddress, isolate());
1805 ExternalReference pending_handler_fp_address(
1806 Isolate::kPendingHandlerFPAddress, isolate());
1807 ExternalReference pending_handler_sp_address(
1808 Isolate::kPendingHandlerSPAddress, isolate());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001809
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001810 // Ask the runtime for help to determine the handler. This will set rax to
1811 // contain the current pending exception, don't clobber it.
1812 ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler,
1813 isolate());
1814 {
1815 FrameScope scope(masm, StackFrame::MANUAL);
1816 __ movp(arg_reg_1, Immediate(0)); // argc.
1817 __ movp(arg_reg_2, Immediate(0)); // argv.
1818 __ Move(arg_reg_3, ExternalReference::isolate_address(isolate()));
1819 __ PrepareCallCFunction(3);
1820 __ CallCFunction(find_handler, 3);
1821 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001822
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001823 // Retrieve the handler context, SP and FP.
1824 __ movp(rsi, masm->ExternalOperand(pending_handler_context_address));
1825 __ movp(rsp, masm->ExternalOperand(pending_handler_sp_address));
1826 __ movp(rbp, masm->ExternalOperand(pending_handler_fp_address));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001827
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001828 // If the handler is a JS frame, restore the context to the frame. Note that
1829 // the context will be set to (rsi == 0) for non-JS frames.
1830 Label skip;
1831 __ testp(rsi, rsi);
1832 __ j(zero, &skip, Label::kNear);
1833 __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
1834 __ bind(&skip);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001835
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001836 // Compute the handler entry address and jump to it.
1837 __ movp(rdi, masm->ExternalOperand(pending_handler_code_address));
1838 __ movp(rdx, masm->ExternalOperand(pending_handler_offset_address));
1839 __ leap(rdi, FieldOperand(rdi, rdx, times_1, Code::kHeaderSize));
1840 __ jmp(rdi);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001841}
1842
1843
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001844void JSEntryStub::Generate(MacroAssembler* masm) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001845 Label invoke, handler_entry, exit;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001846 Label not_outermost_js, not_outermost_js_2;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001847
1848 ProfileEntryHookStub::MaybeCallEntryHook(masm);
1849
Steve Block44f0eee2011-05-26 01:26:41 +01001850 { // NOLINT. Scope block confuses linter.
1851 MacroAssembler::NoRootArrayScope uninitialized_root_register(masm);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001852 // Set up frame.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001853 __ pushq(rbp);
1854 __ movp(rbp, rsp);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001855
Ben Murdochda12d292016-06-02 14:46:10 +01001856 // Push the stack frame type.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001857 int marker = type();
Ben Murdochda12d292016-06-02 14:46:10 +01001858 __ Push(Smi::FromInt(marker)); // context slot
1859 ExternalReference context_address(Isolate::kContextAddress, isolate());
1860 __ Load(kScratchRegister, context_address);
1861 __ Push(kScratchRegister); // context
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001862 // Save callee-saved registers (X64/X32/Win64 calling conventions).
1863 __ pushq(r12);
1864 __ pushq(r13);
1865 __ pushq(r14);
1866 __ pushq(r15);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001867#ifdef _WIN64
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001868 __ pushq(rdi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
1869 __ pushq(rsi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001870#endif
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001871 __ pushq(rbx);
1872
1873#ifdef _WIN64
1874 // On Win64 XMM6-XMM15 are callee-save
1875 __ subp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
1876 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0), xmm6);
1877 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1), xmm7);
1878 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2), xmm8);
1879 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3), xmm9);
1880 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4), xmm10);
1881 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5), xmm11);
1882 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6), xmm12);
1883 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7), xmm13);
1884 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14);
1885 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15);
1886#endif
Steve Block44f0eee2011-05-26 01:26:41 +01001887
1888 // Set up the roots and smi constant registers.
1889 // Needs to be done before any further smi loads.
Steve Block44f0eee2011-05-26 01:26:41 +01001890 __ InitializeRootRegister();
1891 }
1892
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001893 // Save copies of the top frame descriptor on the stack.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001894 ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
Steve Block44f0eee2011-05-26 01:26:41 +01001895 {
1896 Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001897 __ Push(c_entry_fp_operand);
Steve Block44f0eee2011-05-26 01:26:41 +01001898 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001899
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001900 // If this is the outermost JS call, set js_entry_sp value.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001901 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
Steve Block44f0eee2011-05-26 01:26:41 +01001902 __ Load(rax, js_entry_sp);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001903 __ testp(rax, rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001904 __ j(not_zero, &not_outermost_js);
Steve Block053d10c2011-06-13 19:13:29 +01001905 __ Push(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001906 __ movp(rax, rbp);
Steve Block44f0eee2011-05-26 01:26:41 +01001907 __ Store(js_entry_sp, rax);
Steve Block053d10c2011-06-13 19:13:29 +01001908 Label cont;
1909 __ jmp(&cont);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001910 __ bind(&not_outermost_js);
Steve Block053d10c2011-06-13 19:13:29 +01001911 __ Push(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME));
1912 __ bind(&cont);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001913
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001914 // Jump to a faked try block that does the invoke, with a faked catch
1915 // block that sets the pending exception.
1916 __ jmp(&invoke);
1917 __ bind(&handler_entry);
1918 handler_offset_ = handler_entry.pos();
1919 // Caught exception: Store result (exception) in the pending exception
1920 // field in the JSEnv and return a failure sentinel.
Ben Murdoch589d6972011-11-30 16:04:58 +00001921 ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001922 isolate());
Steve Block44f0eee2011-05-26 01:26:41 +01001923 __ Store(pending_exception, rax);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001924 __ LoadRoot(rax, Heap::kExceptionRootIndex);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001925 __ jmp(&exit);
1926
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001927 // Invoke: Link this frame into the handler chain.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001928 __ bind(&invoke);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001929 __ PushStackHandler();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001930
1931 // Clear any pending exceptions.
Steve Block44f0eee2011-05-26 01:26:41 +01001932 __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
1933 __ Store(pending_exception, rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001934
1935 // Fake a receiver (NULL).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001936 __ Push(Immediate(0)); // receiver
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001937
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001938 // Invoke the function by calling through JS entry trampoline builtin and
1939 // pop the faked function when we return. We load the address from an
1940 // external reference instead of inlining the call target address directly
1941 // in the code, because the builtin stubs may not have been generated yet
1942 // at the time this code is generated.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001943 if (type() == StackFrame::ENTRY_CONSTRUCT) {
Steve Block44f0eee2011-05-26 01:26:41 +01001944 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001945 isolate());
Steve Block44f0eee2011-05-26 01:26:41 +01001946 __ Load(rax, construct_entry);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001947 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001948 ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
Steve Block44f0eee2011-05-26 01:26:41 +01001949 __ Load(rax, entry);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001950 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001951 __ leap(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001952 __ call(kScratchRegister);
1953
1954 // Unlink this frame from the handler chain.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001955 __ PopStackHandler();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001956
Steve Block053d10c2011-06-13 19:13:29 +01001957 __ bind(&exit);
Steve Block053d10c2011-06-13 19:13:29 +01001958 // Check if the current stack frame is marked as the outermost JS frame.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001959 __ Pop(rbx);
Steve Block053d10c2011-06-13 19:13:29 +01001960 __ Cmp(rbx, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001961 __ j(not_equal, &not_outermost_js_2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001962 __ Move(kScratchRegister, js_entry_sp);
1963 __ movp(Operand(kScratchRegister, 0), Immediate(0));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001964 __ bind(&not_outermost_js_2);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001965
1966 // Restore the top frame descriptor from the stack.
Steve Block053d10c2011-06-13 19:13:29 +01001967 { Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001968 __ Pop(c_entry_fp_operand);
Steve Block44f0eee2011-05-26 01:26:41 +01001969 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001970
1971 // Restore callee-saved registers (X64 conventions).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001972#ifdef _WIN64
1973 // On Win64 XMM6-XMM15 are callee-save
1974 __ movdqu(xmm6, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0));
1975 __ movdqu(xmm7, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1));
1976 __ movdqu(xmm8, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2));
1977 __ movdqu(xmm9, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3));
1978 __ movdqu(xmm10, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4));
1979 __ movdqu(xmm11, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5));
1980 __ movdqu(xmm12, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6));
1981 __ movdqu(xmm13, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7));
1982 __ movdqu(xmm14, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8));
1983 __ movdqu(xmm15, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9));
1984 __ addp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
1985#endif
1986
1987 __ popq(rbx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001988#ifdef _WIN64
1989 // Callee save on in Win64 ABI, arguments/volatile in AMD64 ABI.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001990 __ popq(rsi);
1991 __ popq(rdi);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001992#endif
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001993 __ popq(r15);
1994 __ popq(r14);
1995 __ popq(r13);
1996 __ popq(r12);
1997 __ addp(rsp, Immediate(2 * kPointerSize)); // remove markers
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001998
1999 // Restore frame pointer and return.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002000 __ popq(rbp);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002001 __ ret(0);
2002}
2003
2004
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002005// -------------------------------------------------------------------------
2006// StringCharCodeAtGenerator
2007
// Fast path for String.prototype.charCodeAt-style access: verifies the
// receiver is a string (unless the caller already guarantees it), checks
// that the index is an in-range smi, and loads the character code into
// result_ as a smi. All slow conditions branch to the labels wired up by
// the surrounding IC (receiver_not_string_, index_not_smi_,
// index_out_of_range_, call_runtime_).
void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
  // If the receiver is a smi trigger the non-string case.
  if (check_mode_ == RECEIVER_IS_UNKNOWN) {
    __ JumpIfSmi(object_, receiver_not_string_);

    // Fetch the instance type of the receiver into result register.
    __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset));
    __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
    // If the receiver is not a string trigger the non-string case.
    __ testb(result_, Immediate(kIsNotStringMask));
    __ j(not_zero, receiver_not_string_);
  }

  // If the index is non-smi trigger the non-smi case.
  __ JumpIfNotSmi(index_, &index_not_smi_);
  // GenerateSlow jumps back here once it has converted the index to a smi.
  __ bind(&got_smi_index_);

  // Check for index out of range. Unsigned 'above_equal' also rejects
  // negative smis, since they compare as large unsigned values.
  __ SmiCompare(index_, FieldOperand(object_, String::kLengthOffset));
  __ j(above_equal, index_out_of_range_);

  __ SmiToInteger32(index_, index_);

  // Load the character at index_ from object_ into result_, bailing to
  // call_runtime_ for string shapes it cannot handle.
  StringCharLoadGenerator::Generate(
      masm, object_, index_, result_, &call_runtime_);

  // Tag the raw character code as a smi for the caller.
  __ Integer32ToSmi(result_, result_);
  __ bind(&exit_);
}
2037
2038
// Slow paths for GenerateFast, emitted out of line. Handles two cases:
// a non-smi index (converted via a runtime call, then control returns to
// the fast path at got_smi_index_) and strings that the fast loader gave
// up on (full runtime charCodeAt). This code is only reachable through
// the labels bound in GenerateFast; falling through from straight-line
// code is a bug, hence the Abort guards at entry and exit.
void StringCharCodeAtGenerator::GenerateSlow(
    MacroAssembler* masm, EmbedMode embed_mode,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);

  Factory* factory = masm->isolate()->factory();
  // Index is not a smi.
  __ bind(&index_not_smi_);
  // If index is a heap number, try converting it to an integer.
  __ CheckMap(index_,
              factory->heap_number_map(),
              index_not_number_,
              DONT_DO_SMI_CHECK);
  call_helper.BeforeCall(masm);
  if (embed_mode == PART_OF_IC_HANDLER) {
    // Preserve the IC's feedback vector and slot across the runtime call.
    __ Push(LoadWithVectorDescriptor::VectorRegister());
    __ Push(LoadDescriptor::SlotRegister());
  }
  __ Push(object_);
  __ Push(index_);  // Consumed by runtime conversion function.
  if (index_flags_ == STRING_INDEX_IS_NUMBER) {
    __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero);
  } else {
    DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
    // NumberToSmi discards numbers that are not exact integers.
    __ CallRuntime(Runtime::kNumberToSmi);
  }
  if (!index_.is(rax)) {
    // Save the conversion result before the pop instructions below
    // have a chance to overwrite it.
    __ movp(index_, rax);
  }
  __ Pop(object_);
  if (embed_mode == PART_OF_IC_HANDLER) {
    // Pops must mirror the pushes above, in reverse order.
    __ Pop(LoadDescriptor::SlotRegister());
    __ Pop(LoadWithVectorDescriptor::VectorRegister());
  }
  // Reload the instance type.
  __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset));
  __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
  call_helper.AfterCall(masm);
  // If index is still not a smi, it must be out of range.
  __ JumpIfNotSmi(index_, index_out_of_range_);
  // Otherwise, return to the fast path.
  __ jmp(&got_smi_index_);

  // Call runtime. We get here when the receiver is a string and the
  // index is a number, but the code of getting the actual character
  // is too complex (e.g., when the string needs to be flattened).
  __ bind(&call_runtime_);
  call_helper.BeforeCall(masm);
  __ Push(object_);
  // The fast path untagged the index; runtime expects a smi.
  __ Integer32ToSmi(index_, index_);
  __ Push(index_);
  __ CallRuntime(Runtime::kStringCharCodeAtRT);
  if (!result_.is(rax)) {
    __ movp(result_, rax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
}
2102
2103
2104// -------------------------------------------------------------------------
2105// StringCharFromCodeGenerator
2106
// Fast path for String.fromCharCode: for a smi character code within the
// one-byte range, looks the string up in the single-character string
// cache. A non-smi code, an out-of-range code, or a cache miss
// (undefined entry) falls through to slow_case_, handled by GenerateSlow.
void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
  // Fast case of Heap::LookupSingleCharacterStringFromCode.
  __ JumpIfNotSmi(code_, &slow_case_);
  __ SmiCompare(code_, Smi::FromInt(String::kMaxOneByteCharCode));
  __ j(above, &slow_case_);

  // Index the single-character string cache (a FixedArray) by the code.
  __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
  SmiIndex index = masm->SmiToIndex(kScratchRegister, code_, kPointerSizeLog2);
  __ movp(result_, FieldOperand(result_, index.reg, index.scale,
                                FixedArray::kHeaderSize));
  // An undefined cache slot means the string has not been materialized yet.
  __ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
  __ j(equal, &slow_case_);
  __ bind(&exit_);
}
2121
2122
// Slow path for GenerateFast: materializes the one-character string via
// the runtime. Reachable only via the slow_case_ label; the Abort guards
// document that straight-line fallthrough into or out of this code is a
// bug.
void StringCharFromCodeGenerator::GenerateSlow(
    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);

  __ bind(&slow_case_);
  call_helper.BeforeCall(masm);
  __ Push(code_);
  __ CallRuntime(Runtime::kStringCharFromCode);
  if (!result_.is(rax)) {
    // Runtime calls return in rax; move to the designated result register.
    __ movp(result_, rax);
  }
  call_helper.AfterCall(masm);
  // Rejoin the fast path's exit.
  __ jmp(&exit_);

  __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
}
2140
2141
// Emits a byte-wise copy loop of 'count' characters from src to dest.
// For TWO_BYTE_ENCODING the count is doubled so the same byte loop copies
// both bytes of each uc16 character. Clobbers kScratchRegister and
// advances src/dest past the copied region; count is left at zero.
void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
                                          Register dest,
                                          Register src,
                                          Register count,
                                          String::Encoding encoding) {
  // Nothing to do for zero characters.
  Label done;
  __ testl(count, count);
  __ j(zero, &done, Label::kNear);

  // Make count the number of bytes to copy.
  if (encoding == String::TWO_BYTE_ENCODING) {
    STATIC_ASSERT(2 == sizeof(uc16));
    __ addl(count, count);
  }

  // Copy remaining characters one byte at a time.
  Label loop;
  __ bind(&loop);
  __ movb(kScratchRegister, Operand(src, 0));
  __ movb(Operand(dest, 0), kScratchRegister);
  __ incp(src);
  __ incp(dest);
  __ decl(count);
  __ j(not_zero, &loop);

  __ bind(&done);
}
2170
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002171
2172void SubStringStub::Generate(MacroAssembler* masm) {
2173 Label runtime;
2174
2175 // Stack frame on entry.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002176 // rsp[0] : return address
2177 // rsp[8] : to
2178 // rsp[16] : from
2179 // rsp[24] : string
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002180
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002181 enum SubStringStubArgumentIndices {
2182 STRING_ARGUMENT_INDEX,
2183 FROM_ARGUMENT_INDEX,
2184 TO_ARGUMENT_INDEX,
2185 SUB_STRING_ARGUMENT_COUNT
2186 };
2187
2188 StackArgumentsAccessor args(rsp, SUB_STRING_ARGUMENT_COUNT,
2189 ARGUMENTS_DONT_CONTAIN_RECEIVER);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002190
2191 // Make sure first argument is a string.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002192 __ movp(rax, args.GetArgumentOperand(STRING_ARGUMENT_INDEX));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002193 STATIC_ASSERT(kSmiTag == 0);
2194 __ testl(rax, Immediate(kSmiTagMask));
2195 __ j(zero, &runtime);
2196 Condition is_string = masm->IsObjectStringType(rax, rbx, rbx);
2197 __ j(NegateCondition(is_string), &runtime);
2198
2199 // rax: string
2200 // rbx: instance type
2201 // Calculate length of sub string using the smi values.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002202 __ movp(rcx, args.GetArgumentOperand(TO_ARGUMENT_INDEX));
2203 __ movp(rdx, args.GetArgumentOperand(FROM_ARGUMENT_INDEX));
Ben Murdochf87a2032010-10-22 12:50:53 +01002204 __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002205
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002206 __ SmiSub(rcx, rcx, rdx); // Overflow doesn't happen.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002207 __ cmpp(rcx, FieldOperand(rax, String::kLengthOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002208 Label not_original_string;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002209 // Shorter than original string's length: an actual substring.
2210 __ j(below, &not_original_string, Label::kNear);
2211 // Longer than original string's length or negative: unsafe arguments.
2212 __ j(above, &runtime);
2213 // Return original string.
2214 Counters* counters = isolate()->counters();
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002215 __ IncrementCounter(counters->sub_string_native(), 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002216 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002217 __ bind(&not_original_string);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002218
2219 Label single_char;
2220 __ SmiCompare(rcx, Smi::FromInt(1));
2221 __ j(equal, &single_char);
2222
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002223 __ SmiToInteger32(rcx, rcx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002224
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002225 // rax: string
2226 // rbx: instance type
2227 // rcx: sub string length
2228 // rdx: from index (smi)
2229 // Deal with different string types: update the index if necessary
2230 // and put the underlying string into edi.
2231 Label underlying_unpacked, sliced_string, seq_or_external_string;
2232 // If the string is not indirect, it can only be sequential or external.
2233 STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
2234 STATIC_ASSERT(kIsIndirectStringMask != 0);
2235 __ testb(rbx, Immediate(kIsIndirectStringMask));
2236 __ j(zero, &seq_or_external_string, Label::kNear);
2237
2238 __ testb(rbx, Immediate(kSlicedNotConsMask));
2239 __ j(not_zero, &sliced_string, Label::kNear);
2240 // Cons string. Check whether it is flat, then fetch first part.
2241 // Flat cons strings have an empty second part.
2242 __ CompareRoot(FieldOperand(rax, ConsString::kSecondOffset),
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002243 Heap::kempty_stringRootIndex);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002244 __ j(not_equal, &runtime);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002245 __ movp(rdi, FieldOperand(rax, ConsString::kFirstOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002246 // Update instance type.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002247 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002248 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002249 __ jmp(&underlying_unpacked, Label::kNear);
2250
2251 __ bind(&sliced_string);
2252 // Sliced string. Fetch parent and correct start index by offset.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002253 __ addp(rdx, FieldOperand(rax, SlicedString::kOffsetOffset));
2254 __ movp(rdi, FieldOperand(rax, SlicedString::kParentOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002255 // Update instance type.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002256 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002257 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
2258 __ jmp(&underlying_unpacked, Label::kNear);
2259
2260 __ bind(&seq_or_external_string);
2261 // Sequential or external string. Just move string to the correct register.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002262 __ movp(rdi, rax);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002263
2264 __ bind(&underlying_unpacked);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002265
Ben Murdoch589d6972011-11-30 16:04:58 +00002266 if (FLAG_string_slices) {
2267 Label copy_routine;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002268 // rdi: underlying subject string
2269 // rbx: instance type of underlying subject string
2270 // rdx: adjusted start index (smi)
2271 // rcx: length
Ben Murdoch589d6972011-11-30 16:04:58 +00002272 // If coming from the make_two_character_string path, the string
2273 // is too short to be sliced anyways.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002274 __ cmpp(rcx, Immediate(SlicedString::kMinLength));
Ben Murdoch589d6972011-11-30 16:04:58 +00002275 // Short slice. Copy instead of slicing.
2276 __ j(less, &copy_routine);
Ben Murdoch589d6972011-11-30 16:04:58 +00002277 // Allocate new sliced string. At this point we do not reload the instance
2278 // type including the string encoding because we simply rely on the info
2279 // provided by the original string. It does not matter if the original
2280 // string's encoding is wrong because we always have to recheck encoding of
2281 // the newly created string's parent anyways due to externalized strings.
2282 Label two_byte_slice, set_slice_header;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002283 STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
Ben Murdoch589d6972011-11-30 16:04:58 +00002284 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
2285 __ testb(rbx, Immediate(kStringEncodingMask));
2286 __ j(zero, &two_byte_slice, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002287 __ AllocateOneByteSlicedString(rax, rbx, r14, &runtime);
Ben Murdoch589d6972011-11-30 16:04:58 +00002288 __ jmp(&set_slice_header, Label::kNear);
2289 __ bind(&two_byte_slice);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002290 __ AllocateTwoByteSlicedString(rax, rbx, r14, &runtime);
Ben Murdoch589d6972011-11-30 16:04:58 +00002291 __ bind(&set_slice_header);
Ben Murdoch589d6972011-11-30 16:04:58 +00002292 __ Integer32ToSmi(rcx, rcx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002293 __ movp(FieldOperand(rax, SlicedString::kLengthOffset), rcx);
2294 __ movp(FieldOperand(rax, SlicedString::kHashFieldOffset),
Ben Murdoch589d6972011-11-30 16:04:58 +00002295 Immediate(String::kEmptyHashField));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002296 __ movp(FieldOperand(rax, SlicedString::kParentOffset), rdi);
2297 __ movp(FieldOperand(rax, SlicedString::kOffsetOffset), rdx);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002298 __ IncrementCounter(counters->sub_string_native(), 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002299 __ ret(3 * kPointerSize);
Ben Murdoch589d6972011-11-30 16:04:58 +00002300
2301 __ bind(&copy_routine);
Ben Murdoch589d6972011-11-30 16:04:58 +00002302 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002303
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002304 // rdi: underlying subject string
2305 // rbx: instance type of underlying subject string
2306 // rdx: adjusted start index (smi)
2307 // rcx: length
2308 // The subject string can only be external or sequential string of either
2309 // encoding at this point.
2310 Label two_byte_sequential, sequential_string;
2311 STATIC_ASSERT(kExternalStringTag != 0);
2312 STATIC_ASSERT(kSeqStringTag == 0);
2313 __ testb(rbx, Immediate(kExternalStringTag));
2314 __ j(zero, &sequential_string);
2315
2316 // Handle external string.
2317 // Rule out short external strings.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002318 STATIC_ASSERT(kShortExternalStringTag != 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002319 __ testb(rbx, Immediate(kShortExternalStringMask));
2320 __ j(not_zero, &runtime);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002321 __ movp(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002322 // Move the pointer so that offset-wise, it looks like a sequential string.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002323 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
2324 __ subp(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002325
2326 __ bind(&sequential_string);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002327 STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002328 __ testb(rbx, Immediate(kStringEncodingMask));
2329 __ j(zero, &two_byte_sequential);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002330
2331 // Allocate the result.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002332 __ AllocateOneByteString(rax, rcx, r11, r14, r15, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002333
2334 // rax: result string
2335 // rcx: result string length
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002336 { // Locate character of sub string start.
2337 SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002338 __ leap(r14, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
2339 SeqOneByteString::kHeaderSize - kHeapObjectTag));
Ben Murdoch85b71792012-04-11 18:30:58 +01002340 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002341 // Locate first character of result.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002342 __ leap(rdi, FieldOperand(rax, SeqOneByteString::kHeaderSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002343
2344 // rax: result string
2345 // rcx: result length
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002346 // r14: first character of result
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002347 // rsi: character of sub string start
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002348 StringHelper::GenerateCopyCharacters(
2349 masm, rdi, r14, rcx, String::ONE_BYTE_ENCODING);
Steve Block44f0eee2011-05-26 01:26:41 +01002350 __ IncrementCounter(counters->sub_string_native(), 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002351 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002352
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002353 __ bind(&two_byte_sequential);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002354 // Allocate the result.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002355 __ AllocateTwoByteString(rax, rcx, r11, r14, r15, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002356
2357 // rax: result string
2358 // rcx: result string length
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002359 { // Locate character of sub string start.
2360 SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002361 __ leap(r14, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
2362 SeqOneByteString::kHeaderSize - kHeapObjectTag));
Ben Murdoch85b71792012-04-11 18:30:58 +01002363 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002364 // Locate first character of result.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002365 __ leap(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002366
2367 // rax: result string
2368 // rcx: result length
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002369 // rdi: first character of result
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002370 // r14: character of sub string start
2371 StringHelper::GenerateCopyCharacters(
2372 masm, rdi, r14, rcx, String::TWO_BYTE_ENCODING);
Steve Block44f0eee2011-05-26 01:26:41 +01002373 __ IncrementCounter(counters->sub_string_native(), 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002374 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002375
2376 // Just jump to runtime to create the sub string.
2377 __ bind(&runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002378 __ TailCallRuntime(Runtime::kSubString);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002379
2380 __ bind(&single_char);
2381 // rax: string
2382 // rbx: instance type
2383 // rcx: sub string length (smi)
2384 // rdx: from index (smi)
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002385 StringCharAtGenerator generator(rax, rdx, rcx, rax, &runtime, &runtime,
2386 &runtime, STRING_INDEX_IS_NUMBER,
2387 RECEIVER_IS_STRING);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002388 generator.GenerateFast(masm);
2389 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
2390 generator.SkipSlow(masm, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002391}
2392
2393
// Emits the ToNumber stub: converts the value in rax to a number.
// Fast paths return immediately for values that are already numbers
// (Smis and HeapNumbers); everything else tail-calls the
// NonNumberToNumberStub, which handles strings, oddballs and the
// generic runtime fallback.
void ToNumberStub::Generate(MacroAssembler* masm) {
  // The ToNumber stub takes one argument in rax.

  // Fast path 1: a Smi is already a number — return it unchanged.
  Label not_smi;
  __ JumpIfNotSmi(rax, &not_smi, Label::kNear);
  __ Ret();
  __ bind(&not_smi);

  // Fast path 2: a HeapNumber is already a number — return it unchanged.
  Label not_heap_number;
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &not_heap_number, Label::kNear);
  __ Ret();
  __ bind(&not_heap_number);

  // Anything else (string, oddball, other heap object) is delegated.
  // Tail call: the callee returns directly to our caller.
  NonNumberToNumberStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
2411
// Emits the NonNumberToNumber stub: converts a value that is known NOT
// to be a Smi or HeapNumber (asserted below) into a number. Strings are
// delegated to StringToNumberStub, oddballs (undefined/null/true/false)
// load their precomputed to-number value, and everything else falls back
// to the Runtime::kToNumber runtime function.
void NonNumberToNumberStub::Generate(MacroAssembler* masm) {
  // The NonNumberToNumber stub takes one argument in rax.
  __ AssertNotNumber(rax);

  // Dispatch on instance type; CmpObjectType leaves the map in rdi for
  // the oddball check below.
  Label not_string;
  __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdi);
  // rax: object
  // rdi: object map
  __ j(above_equal, &not_string, Label::kNear);
  // Strings have their own stub (cached-index fast path + runtime).
  StringToNumberStub stub(masm->isolate());
  __ TailCallStub(&stub);
  __ bind(&not_string);

  // Oddballs carry their numeric conversion precomputed on the object.
  Label not_oddball;
  __ CmpInstanceType(rdi, ODDBALL_TYPE);
  __ j(not_equal, &not_oddball, Label::kNear);
  __ movp(rax, FieldOperand(rax, Oddball::kToNumberOffset));
  __ Ret();
  __ bind(&not_oddball);

  // Slow path: re-push the argument under the return address and
  // tail-call the full runtime conversion.
  __ PopReturnAddressTo(rcx);     // Pop return address.
  __ Push(rax);                   // Push argument.
  __ PushReturnAddressFrom(rcx);  // Push return address.
  __ TailCallRuntime(Runtime::kToNumber);
}
2437
// Emits the StringToNumber stub: converts the string in rax to a number.
// Fast path: strings whose hash field caches an array index can produce
// the Smi result directly via IndexFromHash; all other strings go to the
// Runtime::kStringToNumber runtime function.
void StringToNumberStub::Generate(MacroAssembler* masm) {
  // The StringToNumber stub takes one argument in rax.
  __ AssertString(rax);

  // Check if string has a cached array index.
  Label runtime;
  __ testl(FieldOperand(rax, String::kHashFieldOffset),
           Immediate(String::kContainsCachedArrayIndexMask));
  __ j(not_zero, &runtime, Label::kNear);
  // Decode the cached index out of the hash field into a Smi result.
  __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
  __ IndexFromHash(rax, rax);
  __ Ret();

  // Slow path: re-push the argument under the return address and
  // tail-call the runtime conversion.
  __ bind(&runtime);
  __ PopReturnAddressTo(rcx);     // Pop return address.
  __ Push(rax);                   // Push argument.
  __ PushReturnAddressFrom(rcx);  // Push return address.
  __ TailCallRuntime(Runtime::kStringToNumber);
}
2457
// Emits the ToString stub: converts the value in rax to a string.
// Strings are returned unchanged; Smis and HeapNumbers go through
// NumberToStringStub; oddballs load their precomputed string form;
// everything else falls back to Runtime::kToString.
void ToStringStub::Generate(MacroAssembler* masm) {
  // The ToString stub takes one argument in rax.
  Label is_number;
  __ JumpIfSmi(rax, &is_number, Label::kNear);

  // Already a string? Return it unchanged. CmpObjectType leaves the map
  // in rdi for the heap-number and oddball checks below.
  Label not_string;
  __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdi);
  // rax: receiver
  // rdi: receiver map
  __ j(above_equal, &not_string, Label::kNear);
  __ Ret();
  __ bind(&not_string);

  // Numbers (Smi or HeapNumber) share the NumberToStringStub path.
  Label not_heap_number;
  __ CompareRoot(rdi, Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &not_heap_number, Label::kNear);
  __ bind(&is_number);
  NumberToStringStub stub(isolate());
  __ TailCallStub(&stub);
  __ bind(&not_heap_number);

  // Oddballs carry their string conversion precomputed on the object.
  Label not_oddball;
  __ CmpInstanceType(rdi, ODDBALL_TYPE);
  __ j(not_equal, &not_oddball, Label::kNear);
  __ movp(rax, FieldOperand(rax, Oddball::kToStringOffset));
  __ Ret();
  __ bind(&not_oddball);

  // Slow path: re-push the argument under the return address and
  // tail-call the full runtime conversion.
  __ PopReturnAddressTo(rcx);     // Pop return address.
  __ Push(rax);                   // Push argument.
  __ PushReturnAddressFrom(rcx);  // Push return address.
  __ TailCallRuntime(Runtime::kToString);
}
2491
2492
// Emits the ToName stub: converts the value in rax to a Name (string or
// symbol). Values that are already Names are returned unchanged; numbers
// are stringified via NumberToStringStub; oddballs load their cached
// string form; everything else falls back to Runtime::kToName.
void ToNameStub::Generate(MacroAssembler* masm) {
  // The ToName stub takes one argument in rax.
  Label is_number;
  __ JumpIfSmi(rax, &is_number, Label::kNear);

  // Already a Name? Name instance types occupy the low end of the type
  // range (asserted below), so a single unsigned compare suffices.
  // CmpObjectType leaves the map in rdi for the checks below.
  Label not_name;
  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
  __ CmpObjectType(rax, LAST_NAME_TYPE, rdi);
  // rax: receiver
  // rdi: receiver map
  __ j(above, &not_name, Label::kNear);
  __ Ret();
  __ bind(&not_name);

  // Numbers (Smi or HeapNumber) convert via their string representation.
  Label not_heap_number;
  __ CompareRoot(rdi, Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &not_heap_number, Label::kNear);
  __ bind(&is_number);
  NumberToStringStub stub(isolate());
  __ TailCallStub(&stub);
  __ bind(&not_heap_number);

  // Oddballs use their cached to-string value; strings are Names, so the
  // cached string form serves as the Name result here.
  Label not_oddball;
  __ CmpInstanceType(rdi, ODDBALL_TYPE);
  __ j(not_equal, &not_oddball, Label::kNear);
  __ movp(rax, FieldOperand(rax, Oddball::kToStringOffset));
  __ Ret();
  __ bind(&not_oddball);

  // Slow path: re-push the argument under the return address and
  // tail-call the full runtime conversion.
  __ PopReturnAddressTo(rcx);     // Pop return address.
  __ Push(rax);                   // Push argument.
  __ PushReturnAddressFrom(rcx);  // Push return address.
  __ TailCallRuntime(Runtime::kToName);
}
2527
2528
// Emits code that compares two flat one-byte strings (in |left| and
// |right|) for equality and returns (via ret(0)) with rax holding
// Smi(EQUAL) or Smi(NOT_EQUAL). Unequal lengths short-circuit to
// NOT_EQUAL; equal zero lengths short-circuit to EQUAL; otherwise the
// characters are compared byte by byte. Clobbers scratch1 (reused as
// the length/index register) and scratch2.
void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
                                                   Register left,
                                                   Register right,
                                                   Register scratch1,
                                                   Register scratch2) {
  Register length = scratch1;

  // Compare lengths. Different lengths can never be equal.
  Label check_zero_length;
  __ movp(length, FieldOperand(left, String::kLengthOffset));
  __ SmiCompare(length, FieldOperand(right, String::kLengthOffset));
  __ j(equal, &check_zero_length, Label::kNear);
  __ Move(rax, Smi::FromInt(NOT_EQUAL));
  __ ret(0);

  // Check if the length is zero. Two empty strings are trivially equal.
  Label compare_chars;
  __ bind(&check_zero_length);
  STATIC_ASSERT(kSmiTag == 0);
  __ SmiTest(length);
  __ j(not_zero, &compare_chars, Label::kNear);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  // Compare characters. Jumps to strings_not_equal on the first mismatch;
  // falls through when all |length| characters match.
  __ bind(&compare_chars);
  Label strings_not_equal;
  GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2,
                                  &strings_not_equal, Label::kNear);

  // Characters are equal.
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  // Characters are not equal.
  __ bind(&strings_not_equal);
  __ Move(rax, Smi::FromInt(NOT_EQUAL));
  __ ret(0);
}
2568
2569
// Emits code that performs a three-way lexicographic comparison of two
// flat one-byte strings (in |left| and |right|) and returns (via ret(0))
// with rax holding Smi(LESS), Smi(EQUAL) or Smi(GREATER). Compares the
// first min(left.length, right.length) characters; if that prefix is
// equal, the result is decided by the length difference. Clobbers
// scratch1 (min length), scratch2 (character scratch) and scratch4
// (length difference); scratch3 is unused here but kept for interface
// symmetry with callers that also use the equality helper.
void StringHelper::GenerateCompareFlatOneByteStrings(
    MacroAssembler* masm, Register left, Register right, Register scratch1,
    Register scratch2, Register scratch3, Register scratch4) {
  // Ensure that you can always subtract a string length from a non-negative
  // number (e.g. another length).
  STATIC_ASSERT(String::kMaxLength < 0x7fffffff);

  // Find minimum length and length difference.
  __ movp(scratch1, FieldOperand(left, String::kLengthOffset));
  __ movp(scratch4, scratch1);
  __ SmiSub(scratch4,
            scratch4,
            FieldOperand(right, String::kLengthOffset));
  // Register scratch4 now holds left.length - right.length.
  const Register length_difference = scratch4;
  Label left_shorter;
  __ j(less, &left_shorter, Label::kNear);
  // The right string isn't longer that the left one.
  // Get the right string's length by subtracting the (non-negative) difference
  // from the left string's length.
  __ SmiSub(scratch1, scratch1, length_difference);
  __ bind(&left_shorter);
  // Register scratch1 now holds Min(left.length, right.length).
  const Register min_length = scratch1;

  Label compare_lengths;
  // If min-length is zero, go directly to comparing lengths.
  __ SmiTest(min_length);
  __ j(zero, &compare_lengths, Label::kNear);

  // Compare loop. Jumps to result_not_equal (with the comparison flags of
  // the differing character pair still live) on the first mismatch.
  Label result_not_equal;
  GenerateOneByteCharsCompareLoop(
      masm, left, right, min_length, scratch2, &result_not_equal,
      // In debug-code mode, SmiTest below might push
      // the target label outside the near range.
      Label::kFar);

  // Completed loop without finding different characters.
  // Compare lengths (precomputed).
  __ bind(&compare_lengths);
  __ SmiTest(length_difference);
  Label length_not_equal;
  __ j(not_zero, &length_not_equal, Label::kNear);

  // Result is EQUAL.
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  // Two distinct sources of inequality converge here: a length
  // difference uses signed flags (greater), a character difference uses
  // unsigned flags (above) since the bytes were compared with cmpb.
  Label result_greater;
  Label result_less;
  __ bind(&length_not_equal);
  __ j(greater, &result_greater, Label::kNear);
  __ jmp(&result_less, Label::kNear);
  __ bind(&result_not_equal);
  // Unequal comparison of left to right, either character or length.
  __ j(above, &result_greater, Label::kNear);
  __ bind(&result_less);

  // Result is LESS.
  __ Move(rax, Smi::FromInt(LESS));
  __ ret(0);

  // Result is GREATER.
  __ bind(&result_greater);
  __ Move(rax, Smi::FromInt(GREATER));
  __ ret(0);
}
2638
2639
// Emits the shared character-comparison loop used by the two helpers
// above. Compares |length| (a Smi on entry) one-byte characters of the
// sequential strings in |left| and |right|. Jumps to |chars_not_equal|
// on the first differing byte, leaving the cmpb flags set for a
// subsequent unsigned three-way decision; falls through when all
// characters match. Clobbers left, right, length and scratch.
void StringHelper::GenerateOneByteCharsCompareLoop(
    MacroAssembler* masm, Register left, Register right, Register length,
    Register scratch, Label* chars_not_equal, Label::Distance near_jump) {
  // Change index to run from -length to -1 by adding length to string
  // start. This means that loop ends when index reaches zero, which
  // doesn't need an additional compare.
  __ SmiToInteger32(length, length);
  __ leap(left,
          FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
  __ leap(right,
          FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
  __ negq(length);
  Register index = length;  // index = -length;

  // Compare loop. incq sets ZF when the negative index reaches zero,
  // which terminates the loop without an explicit compare.
  Label loop;
  __ bind(&loop);
  __ movb(scratch, Operand(left, index, times_1, 0));
  __ cmpb(scratch, Operand(right, index, times_1, 0));
  __ j(not_equal, chars_not_equal, near_jump);
  __ incq(index);
  __ j(not_zero, &loop);
}
2663
2664
// Emits the BinaryOpIC-with-allocation-site stub. The template version
// generated here holds an undefined placeholder in rcx; when the stub is
// instantiated via GetCodeCopyFromTemplate() the placeholder is patched
// to the real AllocationSite. The stub then tail-calls the binary-op
// stub that consumes the allocation site.
void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdx    : left
  //  -- rax    : right
  //  -- rsp[0] : return address
  // -----------------------------------

  // Load rcx with the allocation site. We stick an undefined dummy value here
  // and replace it with the real allocation site later when we instantiate this
  // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
  __ Move(rcx, handle(isolate()->heap()->undefined_value()));

  // Make sure that we actually patched the allocation site.
  if (FLAG_debug_code) {
    // Must be a heap object (not a Smi) with the allocation-site map.
    __ testb(rcx, Immediate(kSmiTagMask));
    __ Assert(not_equal, kExpectedAllocationSite);
    __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset),
           isolate()->factory()->allocation_site_map());
    __ Assert(equal, kExpectedAllocationSite);
  }

  // Tail call into the stub that handles binary operations with allocation
  // sites.
  BinaryOpWithAllocationSiteStub stub(isolate(), state());
  __ TailCallStub(&stub);
}
2691
2692
// Emits the CompareIC fast path for the BOOLEAN state: both operands
// (rdx = left, rax = right) must be boolean oddballs, otherwise the stub
// misses. For equality ops the booleans are compared by identity; for
// ordered ops both are first replaced by their cached Smi to-number
// values (0 or 1). The result is produced as a register difference.
void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
  DCHECK_EQ(CompareICState::BOOLEAN, state());
  Label miss;
  // Debug code inflates the emitted sequence, so near jumps to |miss|
  // could go out of range; use far jumps in that configuration.
  Label::Distance const miss_distance =
      masm->emit_debug_code() ? Label::kFar : Label::kNear;

  // Both operands must be heap objects with the boolean map.
  __ JumpIfSmi(rdx, &miss, miss_distance);
  __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
  __ JumpIfSmi(rax, &miss, miss_distance);
  __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
  __ JumpIfNotRoot(rcx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
  __ JumpIfNotRoot(rbx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
  if (!Token::IsEqualityOp(op())) {
    // Ordered comparison: load the cached Smi to-number values (0/1)
    // and swap rax/rdx through the stack so the subtraction below yields
    // left minus right rather than right minus left.
    __ movp(rax, FieldOperand(rax, Oddball::kToNumberOffset));
    __ AssertSmi(rax);
    __ movp(rdx, FieldOperand(rdx, Oddball::kToNumberOffset));
    __ AssertSmi(rdx);
    __ pushq(rax);
    __ movq(rax, rdx);
    __ popq(rdx);
  }
  // Result is the difference of the two registers (zero iff equal).
  __ subp(rax, rdx);
  __ Ret();

  __ bind(&miss);
  GenerateMiss(masm);
}
2720
2721
// Emits the CompareIC fast path for the SMI state: both operands
// (rdx = left, rax = right) must be Smis, otherwise the stub misses.
// Returns the comparison result in rax as a tagged difference: zero iff
// equal, and for ordered comparisons the sign of left - right.
void CompareICStub::GenerateSmis(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::SMI);
  Label miss;
  __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear);

  if (GetCondition() == equal) {
    // For equality we do not care about the sign of the result.
    // rax = right - left; zero iff the Smis are identical.
    __ subp(rax, rdx);
  } else {
    // Ordered comparison needs the sign of left - right.
    Label done;
    __ subp(rdx, rax);
    __ j(no_overflow, &done, Label::kNear);
    // Correct sign of result in case of overflow.
    __ notp(rdx);
    __ bind(&done);
    __ movp(rax, rdx);
  }
  __ ret(0);

  // Not both Smis: fall back to the IC miss handler, which will
  // transition the IC state.
  __ bind(&miss);
  GenerateMiss(masm);
}
2744
2745
// Emits the CompareIC fast path for the NUMBER state: compares two
// numbers (Smi or HeapNumber, rdx = left, rax = right) by loading both
// into SSE registers and using ucomisd. Returns -1/0/1 in rax. NaN
// operands (unordered result) and undefined operands in ordered
// comparisons punt to the generic stub; other non-numbers miss.
void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::NUMBER);

  Label generic_stub;
  Label unordered, maybe_undefined1, maybe_undefined2;
  Label miss;

  // If the IC recorded a Smi on one side, enforce it; a non-Smi there
  // means the IC state is stale and must miss.
  if (left() == CompareICState::SMI) {
    __ JumpIfNotSmi(rdx, &miss);
  }
  if (right() == CompareICState::SMI) {
    __ JumpIfNotSmi(rax, &miss);
  }

  // Load left and right operand into xmm0 (left) and xmm1 (right),
  // converting Smis to doubles as needed.
  Label done, left, left_smi, right_smi;
  __ JumpIfSmi(rax, &right_smi, Label::kNear);
  __ CompareMap(rax, isolate()->factory()->heap_number_map());
  __ j(not_equal, &maybe_undefined1, Label::kNear);
  __ Movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&left, Label::kNear);
  __ bind(&right_smi);
  __ SmiToInteger32(rcx, rax);  // Can't clobber rax yet.
  __ Cvtlsi2sd(xmm1, rcx);

  __ bind(&left);
  __ JumpIfSmi(rdx, &left_smi, Label::kNear);
  __ CompareMap(rdx, isolate()->factory()->heap_number_map());
  __ j(not_equal, &maybe_undefined2, Label::kNear);
  __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  __ jmp(&done);
  __ bind(&left_smi);
  __ SmiToInteger32(rcx, rdx);  // Can't clobber rdx yet.
  __ Cvtlsi2sd(xmm0, rcx);

  __ bind(&done);
  // Compare operands
  __ Ucomisd(xmm0, xmm1);

  // Don't base result on EFLAGS when a NaN is involved.
  __ j(parity_even, &unordered, Label::kNear);

  // Return a result of -1, 0, or 1, based on EFLAGS.
  // Performing mov, because xor would destroy the flag register.
  __ movl(rax, Immediate(0));
  __ movl(rcx, Immediate(0));
  __ setcc(above, rax);  // Add one to zero if carry clear and not equal.
  __ sbbp(rax, rcx);  // Subtract one if below (aka. carry set).
  __ ret(0);

  // Unordered (NaN) or undefined-in-relational cases: let the generic
  // compare stub produce the correct result.
  __ bind(&unordered);
  __ bind(&generic_stub);
  CompareICStub stub(isolate(), op(), CompareICState::GENERIC,
                     CompareICState::GENERIC, CompareICState::GENERIC);
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);

  // Right operand was not a heap number: for ordered relational ops an
  // undefined operand compares as NaN, so route it to |unordered|.
  __ bind(&maybe_undefined1);
  if (Token::IsOrderedRelationalCompareOp(op())) {
    __ Cmp(rax, isolate()->factory()->undefined_value());
    __ j(not_equal, &miss);
    __ JumpIfSmi(rdx, &unordered);
    __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
    __ j(not_equal, &maybe_undefined2, Label::kNear);
    __ jmp(&unordered);
  }

  // Same treatment for an undefined left operand.
  __ bind(&maybe_undefined2);
  if (Token::IsOrderedRelationalCompareOp(op())) {
    __ Cmp(rdx, isolate()->factory()->undefined_value());
    __ j(equal, &unordered);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}
2821
2822
// Emits the CompareIC fast path for the INTERNALIZED_STRING state (only
// used for equality, as asserted). Internalized strings are unique, so
// two of them are equal iff they are the same object; the comparison is
// a single pointer compare. Any non-internalized operand misses.
void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::INTERNALIZED_STRING);
  DCHECK(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;

  // Check that both operands are heap objects.
  Label miss;
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss, Label::kNear);

  // Check that both operands are internalized strings.
  // Both tag checks are folded into one test by OR-ing the instance types.
  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ orp(tmp1, tmp2);
  __ testb(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, &miss, Label::kNear);

  // Internalized strings are compared by identity.
  Label done;
  __ cmpp(left, right);
  // Make sure rax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  DCHECK(right.is(rax));
  __ j(not_equal, &done, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}
2864
2865
// Emits the CompareIC fast path for the UNIQUE_NAME state (only used for
// equality, as asserted). Unique names (internalized strings and
// symbols) are equal iff they are the same object, so the comparison is
// a single pointer compare. Any operand that is not a unique name misses.
void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::UNIQUE_NAME);
  DCHECK(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;

  // Check that both operands are heap objects.
  Label miss;
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss, Label::kNear);

  // Check that both operands are unique names. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));

  __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear);
  __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear);

  // Unique names are compared by identity.
  Label done;
  __ cmpp(left, right);
  // Make sure rax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  DCHECK(right.is(rax));
  __ j(not_equal, &done, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}
2907
2908
// Emits the CompareIC fast path for the STRING state: compares two
// strings (rdx = left, rax = right). Identical objects are trivially
// equal; for equality ops, two distinct internalized strings are known
// unequal without character comparison; flat sequential one-byte
// strings are compared inline via the StringHelper routines; everything
// else goes to the runtime (Runtime::kStringEqual for equality,
// Runtime::kStringCompare for ordered comparisons). Non-strings miss.
void CompareICStub::GenerateStrings(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::STRING);
  Label miss;

  // Equality ops can use cheaper checks and return a boolean-style
  // difference; ordered ops need a full three-way comparison.
  bool equality = Token::IsEqualityOp(op());

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;
  Register tmp3 = rdi;

  // Check that both operands are heap objects.
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss);

  // Check that both operands are strings. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  __ movp(tmp3, tmp1);
  STATIC_ASSERT(kNotStringTag != 0);
  __ orp(tmp3, tmp2);
  __ testb(tmp3, Immediate(kIsNotStringMask));
  __ j(not_zero, &miss);

  // Fast check for identical strings.
  Label not_same;
  __ cmpp(left, right);
  __ j(not_equal, &not_same, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  // Handle not identical strings.
  __ bind(&not_same);

  // Check that both strings are internalized strings. If they are, we're done
  // because we already know they are not identical. We also know they are both
  // strings.
  if (equality) {
    Label do_compare;
    STATIC_ASSERT(kInternalizedTag == 0);
    __ orp(tmp1, tmp2);
    __ testb(tmp1, Immediate(kIsNotInternalizedMask));
    __ j(not_zero, &do_compare, Label::kNear);
    // Make sure rax is non-zero. At this point input operands are
    // guaranteed to be non-zero.
    DCHECK(right.is(rax));
    __ ret(0);
    __ bind(&do_compare);
  }

  // Check that both strings are sequential one-byte.
  Label runtime;
  __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime);

  // Compare flat one-byte strings. Returns when done.
  if (equality) {
    StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1,
                                                  tmp2);
  } else {
    StringHelper::GenerateCompareFlatOneByteStrings(
        masm, left, right, tmp1, tmp2, tmp3, kScratchRegister);
  }

  // Handle more complex cases in runtime.
  __ bind(&runtime);
  if (equality) {
    // Call (not tail-call) the runtime under an internal frame, then turn
    // the returned true/false into a zero/non-zero difference against the
    // true value, matching the equality-IC result convention.
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(left);
      __ Push(right);
      __ CallRuntime(Runtime::kStringEqual);
    }
    __ LoadRoot(rdx, Heap::kTrueValueRootIndex);
    __ subp(rax, rdx);
    __ Ret();
  } else {
    // Re-push the arguments under the return address and tail-call the
    // three-way runtime comparison.
    __ PopReturnAddressTo(tmp1);
    __ Push(left);
    __ Push(right);
    __ PushReturnAddressFrom(tmp1);
    __ TailCallRuntime(Runtime::kStringCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}
3002
3003
// Compare IC fast path for two JSReceiver operands (rdx = left, rax = right).
// Two receivers are "equal" only if they are the same object, so the result
// is simply the pointer difference.  Falls through to the miss handler when
// either operand is a Smi or not a JSReceiver.
void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
  DCHECK_EQ(CompareICState::RECEIVER, state());
  Label miss;
  // Bail out if either operand is a Smi; receivers are heap objects.
  Condition either_smi = masm->CheckEitherSmi(rdx, rax);
  __ j(either_smi, &miss, Label::kNear);

  // Every instance type >= FIRST_JS_RECEIVER_TYPE is a receiver, so a single
  // lower-bound check suffices for each operand (rcx is clobbered as scratch).
  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
  __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
  __ j(below, &miss, Label::kNear);
  __ CmpObjectType(rdx, FIRST_JS_RECEIVER_TYPE, rcx);
  __ j(below, &miss, Label::kNear);

  // This stub is only generated for equality; rax - rdx is zero exactly when
  // the operands are the same object.
  DCHECK_EQ(equal, GetCondition());
  __ subp(rax, rdx);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}
3023
3024
// Compare IC fast path for two receivers known (via feedback) to share the
// map |known_map_|.  The map is referenced through a weak cell so this stub
// does not keep it alive.  rdx = left operand, rax = right operand.
void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) {
  Label miss;
  Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
  // Smis have no map; bail out to miss if either operand is a Smi.
  Condition either_smi = masm->CheckEitherSmi(rdx, rax);
  __ j(either_smi, &miss, Label::kNear);

  // Load the (possibly cleared) map from the weak cell into rdi and verify
  // both operands still carry exactly that map.
  __ GetWeakValue(rdi, cell);
  __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rdi);
  __ j(not_equal, &miss, Label::kNear);
  __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rdi);
  __ j(not_equal, &miss, Label::kNear);

  if (Token::IsEqualityOp(op())) {
    // Equality: identical objects compare equal, so the pointer difference
    // (zero iff same object) is the result.
    __ subp(rax, rdx);
    __ ret(0);
  } else {
    // Relational comparison: tail-call the Runtime::kCompare with
    // (left, right, negative-result-sentinel) pushed under the return
    // address.
    __ PopReturnAddressTo(rcx);
    __ Push(rdx);
    __ Push(rax);
    __ Push(Smi::FromInt(NegativeComparisonResult(GetCondition())));
    __ PushReturnAddressFrom(rcx);
    __ TailCallRuntime(Runtime::kCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}
3052
3053
// Compare IC miss handler: asks the runtime for a stub specialized to the
// observed operand types, then tail-calls the new stub with the original
// operands (rdx, rax) intact.
void CompareICStub::GenerateMiss(MacroAssembler* masm) {
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push the operands twice: the outer pair survives the runtime call and
    // is popped back into rdx/rax below; the inner pair plus the op token are
    // the arguments consumed by Runtime::kCompareIC_Miss.
    __ Push(rdx);
    __ Push(rax);
    __ Push(rdx);
    __ Push(rax);
    __ Push(Smi::FromInt(op()));
    __ CallRuntime(Runtime::kCompareIC_Miss);

    // Compute the entry point of the rewritten stub.
    // The runtime returns the new Code object in rax; its first instruction
    // starts at kHeaderSize past the (tagged) object pointer.
    __ leap(rdi, FieldOperand(rax, Code::kHeaderSize));
    // Restore the original operands before jumping to the new stub.
    __ Pop(rax);
    __ Pop(rdx);
  }

  // Do a tail call to the rewritten stub.
  __ jmp(rdi);
}
3074
Steve Block1e0659c2011-05-24 12:43:12 +01003075
// Emits an inline negative lookup for a compile-time-known unique |name| in
// the dictionary held in |properties|.  Probes kInlinedProbes slots inline;
// if still inconclusive, falls back to calling the full
// NameDictionaryLookupStub.  Jumps to |done| when the name is proven absent
// and to |miss| when it is (or may be) present.  |r0| is a scratch register
// clobbered by the probe sequence.
void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register properties,
                                                      Handle<Name> name,
                                                      Register r0) {
  DCHECK(name->IsUniqueName());
  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the hole value).
  for (int i = 0; i < kInlinedProbes; i++) {
    // r0 points to properties hash.
    // Compute the masked index: (hash + i + i * i) & mask.
    Register index = r0;
    // Capacity is smi 2^n.
    __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset));
    // capacity - 1 is the probe mask because capacity is a power of two.
    __ decl(index);
    // The hash and probe offset are compile-time constants here, so the
    // masked probe index is computed with a single AND against the mask.
    __ andp(index,
            Immediate(name->Hash() + NameDictionary::GetProbeOffset(i)));

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ leap(index, Operand(index, index, times_2, 0));  // index *= 3.

    Register entity_name = r0;
    // Having undefined at this place means the name is not contained.
    STATIC_ASSERT(kSmiTagSize == 1);
    __ movp(entity_name, Operand(properties,
                                 index,
                                 times_pointer_size,
                                 kElementsStartOffset - kHeapObjectTag));
    // An empty (undefined) slot terminates the probe chain: name is absent.
    __ Cmp(entity_name, masm->isolate()->factory()->undefined_value());
    __ j(equal, done);

    // Stop if found the property.
    __ Cmp(entity_name, Handle<Name>(name));
    __ j(equal, miss);

    Label good;
    // Check for the hole and skip.
    // A hole marks a deleted entry; keep probing past it.
    __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex);
    __ j(equal, &good, Label::kNear);

    // Check if the entry name is not a unique name.
    // A non-unique name in the chain could still be equal to |name|, so we
    // cannot prove absence inline — treat as a miss.
    __ movp(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
    __ JumpIfNotUniqueNameInstanceType(
        FieldOperand(entity_name, Map::kInstanceTypeOffset), miss);
    __ bind(&good);
  }

  // Inline probes were inconclusive: run the full lookup stub with the name
  // and its hash as stack arguments.  r0 serves as both result and index.
  NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
                                NEGATIVE_LOOKUP);
  __ Push(Handle<Object>(name));
  __ Push(Immediate(name->Hash()));
  __ CallStub(&stub);
  // Non-zero result means the name was found => miss for a negative lookup.
  __ testp(r0, r0);
  __ j(not_zero, miss);
  __ jmp(done);
}
3137
3138
// Probe the name dictionary in the |elements| register. Jump to the
// |done| label if a property with the given name is found leaving the
// index into the dictionary in |r1|. Jump to the |miss| label
// otherwise.
// Unlike GenerateNegativeLookup, |name| is a register here (the hash is
// computed at runtime).  |r0| and |r1| are scratch registers and must be
// distinct from |elements| and |name|.
void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register elements,
                                                      Register name,
                                                      Register r0,
                                                      Register r1) {
  DCHECK(!elements.is(r0));
  DCHECK(!elements.is(r1));
  DCHECK(!name.is(r0));
  DCHECK(!name.is(r1));

  __ AssertName(name);

  // r0 = capacity - 1, the probe mask (capacity is a power of two).
  __ SmiToInteger32(r0, FieldOperand(elements, kCapacityOffset));
  __ decl(r0);

  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ movl(r1, FieldOperand(name, Name::kHashFieldOffset));
    __ shrl(r1, Immediate(Name::kHashShift));
    if (i > 0) {
      // GetProbeOffset(i) encodes the quadratic probe step i + i*i.
      __ addl(r1, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ andp(r1, r0);

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ leap(r1, Operand(r1, r1, times_2, 0));  // r1 = r1 * 3

    // Check if the key is identical to the name.
    // Unique names compare by pointer identity, so cmpp suffices.
    __ cmpp(name, Operand(elements, r1, times_pointer_size,
                          kElementsStartOffset - kHeapObjectTag));
    __ j(equal, done);
  }

  // Not found within the inlined probes: fall back to the full lookup stub
  // with the name and its hash pushed as stack arguments.
  NameDictionaryLookupStub stub(masm->isolate(), elements, r0, r1,
                                POSITIVE_LOOKUP);
  __ Push(name);
  __ movl(r0, FieldOperand(name, Name::kHashFieldOffset));
  __ shrl(r0, Immediate(Name::kHashShift));
  __ Push(r0);
  __ CallStub(&stub);

  // Zero result means the name was not found => miss for a positive lookup.
  // On success the entry index is left in r1 by the stub.
  __ testp(r0, r0);
  __ j(zero, miss);
  __ jmp(done);
}
3191
3192
// Full (out-of-line) dictionary probe, used as the fallback for the inlined
// probe sequences above.  Performs probes kInlinedProbes..kTotalProbes-1.
void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
  // This stub overrides SometimesSetsUpAFrame() to return false. That means
  // we cannot call anything that could cause a GC from this stub.
  // Stack frame on entry:
  //  rsp[0 * kPointerSize] : return address.
  //  rsp[1 * kPointerSize] : key's hash.
  //  rsp[2 * kPointerSize] : key.
  // Registers:
  //  dictionary_: NameDictionary to probe.
  //  result_: used as scratch.
  //  index_: will hold an index of entry if lookup is successful.
  //          might alias with result_.
  // Returns:
  //  result_ is zero if lookup failed, non zero otherwise.

  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;

  Register scratch = result();

  // Compute the probe mask (capacity - 1) and keep it at rsp[0] for the loop,
  // since |scratch| is reused for other values inside each iteration.
  __ SmiToInteger32(scratch, FieldOperand(dictionary(), kCapacityOffset));
  __ decl(scratch);
  __ Push(scratch);

  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the null value).
  // The extra kPointerSize accounts for the mask pushed above.
  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER,
                              kPointerSize);
  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ movp(scratch, args.GetArgumentOperand(1));  // scratch = key's hash.
    if (i > 0) {
      __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ andp(scratch, Operand(rsp, 0));  // Mask with capacity - 1.

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ leap(index(), Operand(scratch, scratch, times_2, 0));  // index *= 3.

    // Having undefined at this place means the name is not contained.
    __ movp(scratch, Operand(dictionary(), index(), times_pointer_size,
                             kElementsStartOffset - kHeapObjectTag));

    __ Cmp(scratch, isolate()->factory()->undefined_value());
    __ j(equal, &not_in_dictionary);

    // Stop if found the property.
    // args[0] is the key; unique names compare by pointer identity.
    __ cmpp(scratch, args.GetArgumentOperand(0));
    __ j(equal, &in_dictionary);

    if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
      // If we hit a key that is not a unique name during negative
      // lookup we have to bailout as this key might be equal to the
      // key we are looking for.

      // Check if the entry name is not a unique name.
      __ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
      __ JumpIfNotUniqueNameInstanceType(
          FieldOperand(scratch, Map::kInstanceTypeOffset),
          &maybe_in_dictionary);
    }
  }

  __ bind(&maybe_in_dictionary);
  // If we are doing negative lookup then probing failure should be
  // treated as a lookup success. For positive lookup probing failure
  // should be treated as lookup failure.
  if (mode() == POSITIVE_LOOKUP) {
    __ movp(scratch, Immediate(0));
    __ Drop(1);               // Drop the saved mask.
    __ ret(2 * kPointerSize);  // Pop the two stack arguments on return.
  }
  // In NEGATIVE_LOOKUP mode, fall through to &in_dictionary (result = 1).

  __ bind(&in_dictionary);
  __ movp(scratch, Immediate(1));
  __ Drop(1);
  __ ret(2 * kPointerSize);

  __ bind(&not_in_dictionary);
  __ movp(scratch, Immediate(0));
  __ Drop(1);
  __ ret(2 * kPointerSize);
}
3279
3280
// Pre-compiles both FP-save variants of the store-buffer-overflow stub so
// they exist before any code that needs them is patched in at runtime.
void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
    Isolate* isolate) {
  StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
  stub1.GetCode();
  StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
  stub2.GetCode();
}
3288
3289
// Takes the input in 3 registers: address_ value_ and object_.  A pointer to
// the value has just been written into the object, now this stub makes sure
// we keep the GC informed.  The word in the object where the value has been
// written is in the address register.
void RecordWriteStub::Generate(MacroAssembler* masm) {
  Label skip_to_incremental_noncompacting;
  Label skip_to_incremental_compacting;

  // The first two instructions are generated with labels so as to get the
  // offset fixed up correctly by the bind(Label*) call.  We patch it back and
  // forth between a compare instructions (a nop in this position) and the
  // real branch when we start and stop incremental heap marking.
  // See RecordWriteStub::Patch for details.
  // NOTE: the byte positions of these two jumps are load-bearing — the
  // set_byte_at calls below and RecordWriteStub::Patch rewrite them in place.
  __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
  __ jmp(&skip_to_incremental_compacting, Label::kFar);

  // STORE_BUFFER_ONLY path (active when the jumps above are nopped out):
  // just record the slot in the remembered set, or return if not needed.
  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&skip_to_incremental_noncompacting);
  GenerateIncremental(masm, INCREMENTAL);

  __ bind(&skip_to_incremental_compacting);
  GenerateIncremental(masm, INCREMENTAL_COMPACTION);

  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
  // Will be checked in IncrementalMarking::ActivateGeneratedStub.
  // Overwrite the two jumps emitted above with nops of matching sizes.
  masm->set_byte_at(0, kTwoByteNopInstruction);
  masm->set_byte_at(2, kFiveByteNopInstruction);
}
3324
3325
// Write-barrier slow path used while incremental marking is active.  Saves
// the stub's register set, notifies the incremental marker (and updates the
// remembered set when required), then restores registers and returns.
void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
  regs_.Save(masm);

  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    Label dont_need_remembered_set;

    // Load the stored value and skip remembered-set work unless the value is
    // in new space...
    __ movp(regs_.scratch0(), Operand(regs_.address(), 0));
    __ JumpIfNotInNewSpace(regs_.scratch0(),
                           regs_.scratch0(),
                           &dont_need_remembered_set);

    // ...and the holder object itself is in old space (new-space objects do
    // not need remembered-set entries).
    __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(),
                        &dont_need_remembered_set);

    // First notify the incremental marker if necessary, then update the
    // remembered set.
    CheckNeedsToInformIncrementalMarker(
        masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
    InformIncrementalMarker(masm);
    regs_.Restore(masm);
    // RememberedSetHelper returns to the caller (kReturnAtEnd).
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);

    __ bind(&dont_need_remembered_set);
  }

  // Only the marker needs to be informed; no remembered-set update.
  CheckNeedsToInformIncrementalMarker(
      masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
  InformIncrementalMarker(masm);
  regs_.Restore(masm);
  __ ret(0);
}
3358
3359
// Calls the C++ incremental-marking record-write function with
// (object, slot address, isolate) as arguments, preserving all caller-save
// registers around the external call.
void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
  // If the address register would be clobbered by loading arg 1, stage it in
  // kScratchRegister first.
  Register address =
      arg_reg_1.is(regs_.address()) ? kScratchRegister : regs_.address();
  DCHECK(!address.is(regs_.object()));
  DCHECK(!address.is(arg_reg_1));
  __ Move(address, regs_.address());
  __ Move(arg_reg_1, regs_.object());
  // TODO(gc) Can we just set address arg2 in the beginning?
  __ Move(arg_reg_2, address);
  __ LoadAddress(arg_reg_3,
                 ExternalReference::isolate_address(isolate()));
  int argument_count = 3;

  // The callee is known not to trigger GC, so no handles/roots bookkeeping
  // is needed around the call.
  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count);
  __ CallCFunction(
      ExternalReference::incremental_marking_record_write_function(isolate()),
      argument_count);
  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
}
3381
3382
// Decides whether the incremental marker must be informed about this write.
// Returns (or updates the remembered set and returns, per |on_no_need|) when
// no notification is needed; otherwise falls through to the caller's
// InformIncrementalMarker call via the &need_incremental label.
void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    MacroAssembler* masm,
    OnNoNeedToInformIncrementalMarker on_no_need,
    Mode mode) {
  Label on_black;
  Label need_incremental;
  Label need_incremental_pop_object;

  // Locate the page header of |object| by masking off the low page bits,
  // then decrement the page's write-barrier counter.  When the counter goes
  // negative the marker must be informed regardless of object color.
  __ movp(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
  __ andp(regs_.scratch0(), regs_.object());
  __ movp(regs_.scratch1(),
         Operand(regs_.scratch0(),
                 MemoryChunk::kWriteBarrierCounterOffset));
  __ subp(regs_.scratch1(), Immediate(1));
  __ movp(Operand(regs_.scratch0(),
                 MemoryChunk::kWriteBarrierCounterOffset),
         regs_.scratch1());
  __ j(negative, &need_incremental);

  // Let's look at the color of the object:  If it is not black we don't have
  // to inform the incremental marker.
  __ JumpIfBlack(regs_.object(),
                 regs_.scratch0(),
                 regs_.scratch1(),
                 &on_black,
                 Label::kNear);

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&on_black);

  // Get the value from the slot.
  __ movp(regs_.scratch0(), Operand(regs_.address(), 0));

  if (mode == INCREMENTAL_COMPACTION) {
    Label ensure_not_white;

    // During compaction, a value on an evacuation-candidate page needs
    // marker attention unless slot recording is being skipped for the
    // holder's page.
    __ CheckPageFlag(regs_.scratch0(),  // Contains value.
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kEvacuationCandidateMask,
                     zero,
                     &ensure_not_white,
                     Label::kNear);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kSkipEvacuationSlotsRecordingMask,
                     zero,
                     &need_incremental);

    __ bind(&ensure_not_white);
  }

  // We need an extra register for this, so we push the object register
  // temporarily.
  __ Push(regs_.object());
  // White value written into a black object => marker must be informed.
  __ JumpIfWhite(regs_.scratch0(),  // The value.
                 regs_.scratch1(),  // Scratch.
                 regs_.object(),    // Scratch.
                 &need_incremental_pop_object, Label::kNear);
  __ Pop(regs_.object());

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&need_incremental_pop_object);
  __ Pop(regs_.object());

  __ bind(&need_incremental);

  // Fall through when we need to inform the incremental marker.
}
3466
3467
// Trampoline run when a stub deoptimizes: calls into the CEntry stub, then
// unwinds the stub-failure frame and drops the stub's stack arguments before
// returning to the IC miss continuation left on the stack.
void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(isolate(), 1, kSaveFPRegs);
  __ Call(ces.GetCode(), RelocInfo::CODE_TARGET);
  int parameter_count_offset =
      StubFailureTrampolineFrameConstants::kArgumentsLengthOffset;
  // rbx = number of stack arguments recorded in the trampoline frame.
  __ movp(rbx, MemOperand(rbp, parameter_count_offset));
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  __ PopReturnAddressTo(rcx);
  // In JS_FUNCTION_STUB_MODE one extra slot (the receiver) is also dropped.
  int additional_offset =
      function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0;
  __ leap(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset));
  __ jmp(rcx);  // Return to IC Miss stub, continuation still on stack.
}
3481
3482
// Trampoline: materializes the type feedback vector into its descriptor
// register, then emits the LoadIC body inline.
void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
  LoadICStub stub(isolate(), state());
  stub.GenerateForTrampoline(masm);
}
3488
3489
// Trampoline: materializes the type feedback vector into its descriptor
// register, then emits the KeyedLoadIC body inline.
void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
  KeyedLoadICStub stub(isolate(), state());
  stub.GenerateForTrampoline(masm);
}
3495
3496
// Walks a feedback array laid out as (map WeakCell, handler Code) pairs and
// tail-jumps to the handler whose map matches |receiver_map|; jumps to
// |miss| if no pair matches.  When |is_polymorphic| is false, a feedback
// array of exactly one pair (length 2) bails out to |miss| after the first
// entry instead of looping.  |feedback| is clobbered (reused as the handler
// register).
static void HandleArrayCases(MacroAssembler* masm, Register feedback,
                             Register receiver_map, Register scratch1,
                             Register scratch2, Register scratch3,
                             bool is_polymorphic, Label* miss) {
  // feedback initially contains the feedback array
  Label next_loop, prepare_next;
  Label start_polymorphic;

  Register counter = scratch1;
  Register length = scratch2;
  Register cached_map = scratch3;

  // Fast path: check the first (map, handler) pair without loop setup.
  __ movp(cached_map, FieldOperand(feedback, FixedArray::OffsetOfElementAt(0)));
  __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  __ j(not_equal, &start_polymorphic);

  // found, now call handler.
  Register handler = feedback;
  __ movp(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1)));
  __ leap(handler, FieldOperand(handler, Code::kHeaderSize));
  __ jmp(handler);

  // Polymorphic, we have to loop from 2 to N
  __ bind(&start_polymorphic);
  __ SmiToInteger32(length, FieldOperand(feedback, FixedArray::kLengthOffset));
  if (!is_polymorphic) {
    // If the IC could be monomorphic we have to make sure we don't go past the
    // end of the feedback array.
    __ cmpl(length, Immediate(2));
    __ j(equal, miss);
  }
  __ movl(counter, Immediate(2));

  __ bind(&next_loop);
  __ movp(cached_map, FieldOperand(feedback, counter, times_pointer_size,
                                   FixedArray::kHeaderSize));
  __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  __ j(not_equal, &prepare_next);
  // Matching map found: the handler is the next array element.
  __ movp(handler, FieldOperand(feedback, counter, times_pointer_size,
                                FixedArray::kHeaderSize + kPointerSize));
  __ leap(handler, FieldOperand(handler, Code::kHeaderSize));
  __ jmp(handler);

  __ bind(&prepare_next);
  // Advance by one (map, handler) pair.
  __ addl(counter, Immediate(2));
  __ cmpl(counter, length);
  __ j(less, &next_loop);

  // We exhausted our array of map handler pairs.
  __ jmp(miss);
}
3548
3549
// Fast-path dispatch for a (speculatively) monomorphic IC: compares the
// receiver's map against the WeakCell in |feedback| and tail-jumps to the
// handler stored in the next vector slot on a match.  On mismatch jumps to
// |try_array|; Smi receivers jump to |load_smi_map| (which re-enters at
// |compare_map| with a substitute map).  |feedback| is clobbered.
static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver,
                                  Register receiver_map, Register feedback,
                                  Register vector, Register integer_slot,
                                  Label* compare_map, Label* load_smi_map,
                                  Label* try_array) {
  __ JumpIfSmi(receiver, load_smi_map);
  // Offset 0 is the map word of the receiver.
  __ movp(receiver_map, FieldOperand(receiver, 0));

  __ bind(compare_map);
  __ cmpp(receiver_map, FieldOperand(feedback, WeakCell::kValueOffset));
  __ j(not_equal, try_array);
  // Map matched: the handler lives one slot after the feedback cell.
  Register handler = feedback;
  __ movp(handler, FieldOperand(vector, integer_slot, times_pointer_size,
                                FixedArray::kHeaderSize + kPointerSize));
  __ leap(handler, FieldOperand(handler, Code::kHeaderSize));
  __ jmp(handler);
}
3567
3568
// Emits the LoadIC body as a standalone stub (in_frame = false).
void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }
3570
3571
// Emits the LoadIC body for inclusion in a trampoline (in_frame = true).
void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}
3575
3576
// LoadIC dispatch body: tries monomorphic feedback, then a polymorphic
// feedback array, then the megamorphic stub cache, and finally the generic
// miss handler.
void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // rdx
  Register name = LoadWithVectorDescriptor::NameRegister();          // rcx
  Register vector = LoadWithVectorDescriptor::VectorRegister();      // rbx
  Register slot = LoadWithVectorDescriptor::SlotRegister();          // rax
  Register feedback = rdi;
  Register integer_slot = r8;
  Register receiver_map = r9;

  // Load the feedback entry for this IC's slot from the feedback vector.
  __ SmiToInteger32(integer_slot, slot);
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector,
                        integer_slot, &compare_map, &load_smi_map, &try_array);

  // Is it a fixed array?
  __ bind(&try_array);
  __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &not_array);
  HandleArrayCases(masm, feedback, receiver_map, integer_slot, r11, r15, true,
                   &miss);

  __ bind(&not_array);
  // Not an array: megamorphic only if the feedback is the megamorphic
  // sentinel symbol; otherwise fall through to miss.
  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
  __ j(not_equal, &miss);
  Code::Flags code_flags =
      Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::LOAD_IC));
  masm->isolate()->stub_cache()->GenerateProbe(
      masm, Code::LOAD_IC, code_flags, receiver, name, feedback, no_reg);

  __ bind(&miss);
  LoadIC::GenerateMiss(masm);

  __ bind(&load_smi_map);
  // Smi receivers use the heap-number map for the feedback map comparison.
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}
3620
3621
// Emits the KeyedLoadIC body as a standalone stub (in_frame = false).
void KeyedLoadICStub::Generate(MacroAssembler* masm) {
  GenerateImpl(masm, false);
}
3625
3626
// Emits the KeyedLoadIC body for inclusion in a trampoline (in_frame = true).
void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}
3630
3631
// KeyedLoadIC dispatch body: tries monomorphic feedback, a polymorphic
// element-handler array, the megamorphic stub, and a name-keyed feedback
// pair before falling back to the generic miss handler.
void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // rdx
  Register key = LoadWithVectorDescriptor::NameRegister();           // rcx
  Register vector = LoadWithVectorDescriptor::VectorRegister();      // rbx
  Register slot = LoadWithVectorDescriptor::SlotRegister();          // rax
  Register feedback = rdi;
  Register integer_slot = r8;
  Register receiver_map = r9;

  // Load the feedback entry for this IC's slot from the feedback vector.
  __ SmiToInteger32(integer_slot, slot);
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector,
                        integer_slot, &compare_map, &load_smi_map, &try_array);

  __ bind(&try_array);
  // Is it a fixed array?
  __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &not_array);

  // We have a polymorphic element handler.
  Label polymorphic, try_poly_name;
  __ bind(&polymorphic);
  HandleArrayCases(masm, feedback, receiver_map, integer_slot, r11, r15, true,
                   &miss);

  __ bind(&not_array);
  // Is it generic?
  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
  __ j(not_equal, &try_poly_name);
  // Generic: tail-jump to the shared megamorphic keyed-load stub.
  Handle<Code> megamorphic_stub =
      KeyedLoadIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
  __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);

  __ bind(&try_poly_name);
  // We might have a name in feedback, and a fixed array in the next slot.
  __ cmpp(key, feedback);
  __ j(not_equal, &miss);
  // If the name comparison succeeded, we know we have a fixed array with
  // at least one map/handler pair.
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize + kPointerSize));
  // is_polymorphic == false: the array may hold just one pair.
  HandleArrayCases(masm, feedback, receiver_map, integer_slot, r11, r15, false,
                   &miss);

  __ bind(&miss);
  KeyedLoadIC::GenerateMiss(masm);

  __ bind(&load_smi_map);
  // Smi receivers use the heap-number map for the feedback map comparison.
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}
3690
3691
3692void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) {
3693 __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
3694 VectorStoreICStub stub(isolate(), state());
3695 stub.GenerateForTrampoline(masm);
3696}
3697
3698
3699void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
3700 __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
3701 VectorKeyedStoreICStub stub(isolate(), state());
3702 stub.GenerateForTrampoline(masm);
3703}
3704
3705
3706void VectorStoreICStub::Generate(MacroAssembler* masm) {
3707 GenerateImpl(masm, false);
3708}
3709
3710
3711void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
3712 GenerateImpl(masm, true);
3713}
3714
3715
// Emits the body of the vector-based StoreIC: probe the feedback slot for a
// monomorphic weak-cell hit or a polymorphic map/handler array, probe the
// megamorphic stub cache if the slot holds the megamorphic sentinel, and fall
// through to the miss handler otherwise. Note: in_frame is not used here.
void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = VectorStoreICDescriptor::ReceiverRegister();  // rdx
  Register key = VectorStoreICDescriptor::NameRegister();           // rcx
  Register vector = VectorStoreICDescriptor::VectorRegister();      // rbx
  Register slot = VectorStoreICDescriptor::SlotRegister();          // rdi
  DCHECK(VectorStoreICDescriptor::ValueRegister().is(rax));         // rax
  Register feedback = r8;
  Register integer_slot = r9;
  Register receiver_map = r11;
  DCHECK(!AreAliased(feedback, integer_slot, vector, slot, receiver_map));

  // Load the feedback element for this slot: feedback = vector[slot].
  __ SmiToInteger32(integer_slot, slot);
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector,
                        integer_slot, &compare_map, &load_smi_map, &try_array);

  // Is it a fixed array? (polymorphic map/handler pairs)
  __ bind(&try_array);
  __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &not_array);
  HandleArrayCases(masm, feedback, receiver_map, integer_slot, r14, r15, true,
                   &miss);

  // Not an array: the only remaining fast case is the megamorphic sentinel,
  // which is handled via the stub cache probe below.
  __ bind(&not_array);
  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
  __ j(not_equal, &miss);

  Code::Flags code_flags =
      Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::STORE_IC));
  masm->isolate()->stub_cache()->GenerateProbe(masm, Code::STORE_IC, code_flags,
                                               receiver, key, feedback, no_reg);

  __ bind(&miss);
  StoreIC::GenerateMiss(masm);

  // Smi receivers: use the HeapNumber map as the receiver map before
  // re-running the monomorphic map comparison.
  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}
3762
3763
3764void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) {
3765 GenerateImpl(masm, false);
3766}
3767
3768
3769void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
3770 GenerateImpl(masm, true);
3771}
3772
3773
3774static void HandlePolymorphicKeyedStoreCase(MacroAssembler* masm,
3775 Register receiver_map,
3776 Register feedback, Register scratch,
3777 Register scratch1,
3778 Register scratch2, Label* miss) {
3779 // feedback initially contains the feedback array
3780 Label next, next_loop, prepare_next;
3781 Label transition_call;
3782
3783 Register cached_map = scratch;
3784 Register counter = scratch1;
3785 Register length = scratch2;
3786
3787 // Polymorphic, we have to loop from 0 to N - 1
3788 __ movp(counter, Immediate(0));
3789 __ movp(length, FieldOperand(feedback, FixedArray::kLengthOffset));
3790 __ SmiToInteger32(length, length);
3791
3792 __ bind(&next_loop);
3793 __ movp(cached_map, FieldOperand(feedback, counter, times_pointer_size,
3794 FixedArray::kHeaderSize));
3795 __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
3796 __ j(not_equal, &prepare_next);
3797 __ movp(cached_map, FieldOperand(feedback, counter, times_pointer_size,
3798 FixedArray::kHeaderSize + kPointerSize));
3799 __ CompareRoot(cached_map, Heap::kUndefinedValueRootIndex);
3800 __ j(not_equal, &transition_call);
3801 __ movp(feedback, FieldOperand(feedback, counter, times_pointer_size,
3802 FixedArray::kHeaderSize + 2 * kPointerSize));
3803 __ leap(feedback, FieldOperand(feedback, Code::kHeaderSize));
3804 __ jmp(feedback);
3805
3806 __ bind(&transition_call);
3807 DCHECK(receiver_map.is(VectorStoreTransitionDescriptor::MapRegister()));
3808 __ movp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
3809 // The weak cell may have been cleared.
3810 __ JumpIfSmi(receiver_map, miss);
3811 // Get the handler in value.
3812 __ movp(feedback, FieldOperand(feedback, counter, times_pointer_size,
3813 FixedArray::kHeaderSize + 2 * kPointerSize));
3814 __ leap(feedback, FieldOperand(feedback, Code::kHeaderSize));
3815 __ jmp(feedback);
3816
3817 __ bind(&prepare_next);
3818 __ addl(counter, Immediate(3));
3819 __ cmpl(counter, length);
3820 __ j(less, &next_loop);
3821
3822 // We exhausted our array of map handler pairs.
3823 __ jmp(miss);
3824}
3825
3826
// Emits the body of the vector-based KeyedStoreIC: probe the feedback slot
// for a monomorphic weak-cell hit, a polymorphic (map, transition, handler)
// array, the megamorphic sentinel, or a cached property name, and fall
// through to the miss handler otherwise. Note: in_frame is not used here.
void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = VectorStoreICDescriptor::ReceiverRegister();  // rdx
  Register key = VectorStoreICDescriptor::NameRegister();           // rcx
  Register vector = VectorStoreICDescriptor::VectorRegister();      // rbx
  Register slot = VectorStoreICDescriptor::SlotRegister();          // rdi
  DCHECK(VectorStoreICDescriptor::ValueRegister().is(rax));         // rax
  Register feedback = r8;
  Register integer_slot = r9;
  Register receiver_map = r11;
  DCHECK(!AreAliased(feedback, integer_slot, vector, slot, receiver_map));

  // Load the feedback element for this slot: feedback = vector[slot].
  __ SmiToInteger32(integer_slot, slot);
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector,
                        integer_slot, &compare_map, &load_smi_map, &try_array);

  // Is it a fixed array? (polymorphic store: map/transition/handler triples)
  __ bind(&try_array);
  __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &not_array);
  HandlePolymorphicKeyedStoreCase(masm, receiver_map, feedback, integer_slot,
                                  r15, r14, &miss);

  __ bind(&not_array);
  Label try_poly_name;
  // Megamorphic? Then tail-jump to the generic megamorphic keyed-store stub.
  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
  __ j(not_equal, &try_poly_name);

  Handle<Code> megamorphic_stub =
      KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
  __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);

  __ bind(&try_poly_name);
  // We might have a name in feedback, and a fixed array in the next slot.
  __ cmpp(key, feedback);
  __ j(not_equal, &miss);
  // If the name comparison succeeded, we know we have a fixed array with
  // at least one map/handler pair.
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize + kPointerSize));
  HandleArrayCases(masm, feedback, receiver_map, integer_slot, r14, r15, false,
                   &miss);

  __ bind(&miss);
  KeyedStoreIC::GenerateMiss(masm);

  // Smi receivers: use the HeapNumber map as the receiver map before
  // re-running the monomorphic map comparison.
  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}
3884
3885
3886void CallICTrampolineStub::Generate(MacroAssembler* masm) {
3887 __ EmitLoadTypeFeedbackVector(rbx);
3888 CallICStub stub(isolate(), state());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003889 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
3890}
3891
3892
3893void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
3894 if (masm->isolate()->function_entry_hook() != NULL) {
3895 ProfileEntryHookStub stub(masm->isolate());
3896 masm->CallStub(&stub);
3897 }
3898}
3899
3900
// Emits the profile entry hook stub body: preserves all caller-saved
// registers, computes (function address, original stack pointer) into the
// first two C argument registers, and calls the installed entry hook as a
// C function.
void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  // This stub can be called from essentially anywhere, so it needs to save
  // all volatile and callee-save registers.
  const size_t kNumSavedRegisters = 2;
  __ pushq(arg_reg_1);
  __ pushq(arg_reg_2);

  // Calculate the original stack pointer and store it in the second arg.
  // Skip the two registers saved above plus the return address.
  __ leap(arg_reg_2,
          Operand(rsp, kNumSavedRegisters * kRegisterSize + kPCOnStackSize));

  // Calculate the function address to the first arg: the return address on
  // the stack minus the length of the short call that got us here.
  __ movp(arg_reg_1, Operand(rsp, kNumSavedRegisters * kRegisterSize));
  __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));

  // Save the remainder of the volatile registers.
  masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);

  // Call the entry hook function.
  __ Move(rax, FUNCTION_ADDR(isolate()->function_entry_hook()),
          Assembler::RelocInfoNone());

  AllowExternalCallThatCantCauseGC scope(masm);

  const int kArgumentCount = 2;
  __ PrepareCallCFunction(kArgumentCount);
  __ CallCFunction(rax, kArgumentCount);

  // Restore volatile regs.
  masm->PopCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);
  __ popq(arg_reg_2);
  __ popq(arg_reg_1);

  __ Ret();
}
3936
3937
// Emits a tail-call dispatch to the array constructor stub T. With
// DISABLE_ALLOCATION_SITES a single stub for the initial fast elements kind
// is used; with DONT_OVERRIDE the runtime elements kind in rdx is compared
// against each fast kind in sequence and the matching stub is tail-called.
template<class T>
static void CreateArrayDispatch(MacroAssembler* masm,
                                AllocationSiteOverrideMode mode) {
  if (mode == DISABLE_ALLOCATION_SITES) {
    T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    // One compare-and-tail-call per fast elements kind; rdx holds the kind.
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmpl(rdx, Immediate(kind));
      __ j(not_equal, &next);
      T stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}
3963
3964
3965static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
3966 AllocationSiteOverrideMode mode) {
3967 // rbx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
3968 // rdx - kind (if mode != DISABLE_ALLOCATION_SITES)
3969 // rax - number of arguments
3970 // rdi - constructor?
3971 // rsp[0] - return address
3972 // rsp[8] - last argument
3973 Handle<Object> undefined_sentinel(
3974 masm->isolate()->heap()->undefined_value(),
3975 masm->isolate());
3976
3977 Label normal_sequence;
3978 if (mode == DONT_OVERRIDE) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003979 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
3980 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
3981 STATIC_ASSERT(FAST_ELEMENTS == 2);
3982 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
3983 STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4);
3984 STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003985
3986 // is the low bit set? If so, we are holey and that is good.
3987 __ testb(rdx, Immediate(1));
3988 __ j(not_zero, &normal_sequence);
3989 }
3990
3991 // look at the first argument
3992 StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
3993 __ movp(rcx, args.GetArgumentOperand(0));
3994 __ testp(rcx, rcx);
3995 __ j(zero, &normal_sequence);
3996
3997 if (mode == DISABLE_ALLOCATION_SITES) {
3998 ElementsKind initial = GetInitialFastElementsKind();
3999 ElementsKind holey_initial = GetHoleyElementsKind(initial);
4000
4001 ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
4002 holey_initial,
4003 DISABLE_ALLOCATION_SITES);
4004 __ TailCallStub(&stub_holey);
4005
4006 __ bind(&normal_sequence);
4007 ArraySingleArgumentConstructorStub stub(masm->isolate(),
4008 initial,
4009 DISABLE_ALLOCATION_SITES);
4010 __ TailCallStub(&stub);
4011 } else if (mode == DONT_OVERRIDE) {
4012 // We are going to create a holey array, but our kind is non-holey.
4013 // Fix kind and retry (only if we have an allocation site in the slot).
4014 __ incl(rdx);
4015
4016 if (FLAG_debug_code) {
4017 Handle<Map> allocation_site_map =
4018 masm->isolate()->factory()->allocation_site_map();
4019 __ Cmp(FieldOperand(rbx, 0), allocation_site_map);
4020 __ Assert(equal, kExpectedAllocationSite);
4021 }
4022
4023 // Save the resulting elements kind in type info. We can't just store r3
4024 // in the AllocationSite::transition_info field because elements kind is
4025 // restricted to a portion of the field...upper bits need to be left alone.
4026 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
4027 __ SmiAddConstant(FieldOperand(rbx, AllocationSite::kTransitionInfoOffset),
4028 Smi::FromInt(kFastElementsKindPackedToHoley));
4029
4030 __ bind(&normal_sequence);
4031 int last_index = GetSequenceIndexFromFastElementsKind(
4032 TERMINAL_FAST_ELEMENTS_KIND);
4033 for (int i = 0; i <= last_index; ++i) {
4034 Label next;
4035 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4036 __ cmpl(rdx, Immediate(kind));
4037 __ j(not_equal, &next);
4038 ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
4039 __ TailCallStub(&stub);
4040 __ bind(&next);
4041 }
4042
4043 // If we reached this point there is a problem.
4044 __ Abort(kUnexpectedElementsKindInArrayConstructor);
4045 } else {
4046 UNREACHABLE();
4047 }
4048}
4049
4050
4051template<class T>
4052static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
4053 int to_index = GetSequenceIndexFromFastElementsKind(
4054 TERMINAL_FAST_ELEMENTS_KIND);
4055 for (int i = 0; i <= to_index; ++i) {
4056 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4057 T stub(isolate, kind);
4058 stub.GetCode();
4059 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
4060 T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
4061 stub1.GetCode();
4062 }
4063 }
4064}
4065
4066
// Pre-generates the no-argument, single-argument and N-argument Array
// constructor stubs for every fast elements kind.
void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
  ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
      isolate);
}
4075
4076
4077void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
4078 Isolate* isolate) {
4079 ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
4080 for (int i = 0; i < 2; i++) {
4081 // For internal arrays we only need a few things
4082 InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
4083 stubh1.GetCode();
4084 InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
4085 stubh2.GetCode();
4086 InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
4087 stubh3.GetCode();
4088 }
4089}
4090
4091
4092void ArrayConstructorStub::GenerateDispatchToArrayStub(
4093 MacroAssembler* masm,
4094 AllocationSiteOverrideMode mode) {
4095 if (argument_count() == ANY) {
4096 Label not_zero_case, not_one_case;
4097 __ testp(rax, rax);
4098 __ j(not_zero, &not_zero_case);
4099 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
4100
4101 __ bind(&not_zero_case);
4102 __ cmpl(rax, Immediate(1));
4103 __ j(greater, &not_one_case);
4104 CreateArrayDispatchOneArgument(masm, mode);
4105
4106 __ bind(&not_one_case);
4107 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
4108 } else if (argument_count() == NONE) {
4109 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
4110 } else if (argument_count() == ONE) {
4111 CreateArrayDispatchOneArgument(masm, mode);
4112 } else if (argument_count() == MORE_THAN_ONE) {
4113 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
4114 } else {
4115 UNREACHABLE();
4116 }
4117}
4118
4119
// Emits the generic Array constructor stub: for a plain (non-subclass) call
// it dispatches to a specialized stub, using AllocationSite feedback in rbx
// when present; for subclass construction (target != new target) it
// tail-calls Runtime::kNewArray with the arguments rearranged on the stack.
void ArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rax : argc
  // -- rbx : AllocationSite or undefined
  // -- rdi : constructor
  // -- rdx : new target
  // -- rsp[0] : return address
  // -- rsp[8] : last argument
  // -----------------------------------
  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rcx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);

    // We should either have undefined in rbx or a valid AllocationSite
    __ AssertUndefinedOrAllocationSite(rbx);
  }

  // Enter the context of the Array function.
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Subclass construction: the new target differs from the Array function.
  Label subclassing;
  __ cmpp(rdi, rdx);
  __ j(not_equal, &subclassing);

  Label no_info;
  // If the feedback vector is the undefined value call an array constructor
  // that doesn't use AllocationSites.
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(equal, &no_info);

  // Only look at the lower 16 bits of the transition info.
  __ movp(rdx, FieldOperand(rbx, AllocationSite::kTransitionInfoOffset));
  __ SmiToInteger32(rdx, rdx);
  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
  __ andp(rdx, Immediate(AllocationSite::ElementsKindBits::kMask));
  GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);

  __ bind(&no_info);
  GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);

  // Subclassing: store the constructor in the receiver slot, set the runtime
  // argument count (argc + receiver/new-target/site extras), push the new
  // target and the allocation site, and go to Runtime::kNewArray.
  __ bind(&subclassing);
  switch (argument_count()) {
    case ANY:
    case MORE_THAN_ONE: {
      StackArgumentsAccessor args(rsp, rax);
      __ movp(args.GetReceiverOperand(), rdi);
      __ addp(rax, Immediate(3));
      break;
    }
    case NONE: {
      StackArgumentsAccessor args(rsp, 0);
      __ movp(args.GetReceiverOperand(), rdi);
      __ Set(rax, 3);
      break;
    }
    case ONE: {
      StackArgumentsAccessor args(rsp, 1);
      __ movp(args.GetReceiverOperand(), rdi);
      __ Set(rax, 4);
      break;
    }
  }
  __ PopReturnAddressTo(rcx);
  __ Push(rdx);
  __ Push(rbx);
  __ PushReturnAddressFrom(rcx);
  __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
}
4198
4199
// Emits dispatch for the internal array constructor for one elements kind:
// tail-calls the no-argument, single-argument (packed or holey, depending on
// the first argument) or N-argument stub based on argc in rax.
void InternalArrayConstructorStub::GenerateCase(
    MacroAssembler* masm, ElementsKind kind) {
  Label not_zero_case, not_one_case;
  Label normal_sequence;

  __ testp(rax, rax);
  __ j(not_zero, &not_zero_case);
  InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
  __ TailCallStub(&stub0);

  __ bind(&not_zero_case);
  __ cmpl(rax, Immediate(1));
  __ j(greater, &not_one_case);

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array
    // look at the first argument: a nonzero length means holey.
    StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movp(rcx, args.GetArgumentOperand(0));
    __ testp(rcx, rcx);
    __ j(zero, &normal_sequence);

    InternalArraySingleArgumentConstructorStub
        stub1_holey(isolate(), GetHoleyElementsKind(kind));
    __ TailCallStub(&stub1_holey);
  }

  __ bind(&normal_sequence);
  InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
  __ TailCallStub(&stub1);

  __ bind(&not_one_case);
  InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
  __ TailCallStub(&stubN);
}
4235
4236
// Emits the internal array constructor stub: decodes the elements kind from
// the constructor's initial map and dispatches to the FAST_ELEMENTS or
// FAST_HOLEY_ELEMENTS case.
void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rax : argc
  // -- rdi : constructor
  // -- rsp[0] : return address
  // -- rsp[8] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rcx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind
  __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));

  // Load the map's "bit field 2" into |result|. We only need the first byte,
  // but the following masking takes care of that anyway.
  __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ DecodeField<Map::ElementsKindBits>(rcx);

  if (FLAG_debug_code) {
    // Internal arrays only ever use these two kinds.
    Label done;
    __ cmpl(rcx, Immediate(FAST_ELEMENTS));
    __ j(equal, &done);
    __ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS));
    __ Assert(equal,
              kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  Label fast_elements_case;
  __ cmpl(rcx, Immediate(FAST_ELEMENTS));
  __ j(equal, &fast_elements_case);
  GenerateCase(masm, FAST_HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateCase(masm, FAST_ELEMENTS);
}
4286
4287
// Emits the fast path for `new target(...)`: when the new target is a
// JSFunction whose initial map's constructor is the call target, allocates
// and initializes the JSObject inline (including in-object slack tracking);
// otherwise tail-calls Runtime::kNewObject.
void FastNewObjectStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rdi : target
  // -- rdx : new target
  // -- rsi : context
  // -- rsp[0] : return address
  // -----------------------------------
  __ AssertFunction(rdi);
  __ AssertReceiver(rdx);

  // Verify that the new target is a JSFunction.
  Label new_object;
  __ CmpObjectType(rdx, JS_FUNCTION_TYPE, rbx);
  __ j(not_equal, &new_object);

  // Load the initial map and verify that it's in fact a map.
  __ movp(rcx, FieldOperand(rdx, JSFunction::kPrototypeOrInitialMapOffset));
  __ JumpIfSmi(rcx, &new_object);
  __ CmpObjectType(rcx, MAP_TYPE, rbx);
  __ j(not_equal, &new_object);

  // Fall back to runtime if the target differs from the new target's
  // initial map constructor.
  __ cmpp(rdi, FieldOperand(rcx, Map::kConstructorOrBackPointerOffset));
  __ j(not_equal, &new_object);

  // Allocate the JSObject on the heap.
  Label allocate, done_allocate;
  // Instance size is stored in words; scale it to bytes in rbx.
  __ movzxbl(rbx, FieldOperand(rcx, Map::kInstanceSizeOffset));
  __ leal(rbx, Operand(rbx, times_pointer_size, 0));
  __ Allocate(rbx, rax, rdi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
  __ bind(&done_allocate);

  // Initialize the JSObject fields: map, then empty properties and elements.
  __ movp(FieldOperand(rax, JSObject::kMapOffset), rcx);
  __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rbx);
  STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
  // rbx now points at the first in-object field.
  __ leap(rbx, FieldOperand(rax, JSObject::kHeaderSize));

  // ----------- S t a t e -------------
  // -- rax : result (tagged)
  // -- rbx : result fields (untagged)
  // -- rdi : result end (untagged)
  // -- rcx : initial map
  // -- rsi : context
  // -- rsp[0] : return address
  // -----------------------------------

  // Perform in-object slack tracking if requested.
  Label slack_tracking;
  STATIC_ASSERT(Map::kNoSlackTracking == 0);
  __ LoadRoot(r11, Heap::kUndefinedValueRootIndex);
  __ testl(FieldOperand(rcx, Map::kBitField3Offset),
           Immediate(Map::ConstructionCounter::kMask));
  __ j(not_zero, &slack_tracking, Label::kNear);
  {
    // No slack tracking: initialize all in-object fields with undefined.
    __ InitializeFieldsWithFiller(rbx, rdi, r11);
    __ Ret();
  }
  __ bind(&slack_tracking);
  {
    // Decrease generous allocation count.
    STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
    __ subl(FieldOperand(rcx, Map::kBitField3Offset),
            Immediate(1 << Map::ConstructionCounter::kShift));

    // Initialize the in-object fields with undefined. rdx is set to the
    // boundary: result end minus the unused (slack) property words.
    __ movzxbl(rdx, FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset));
    __ negp(rdx);
    __ leap(rdx, Operand(rdi, rdx, times_pointer_size, 0));
    __ InitializeFieldsWithFiller(rbx, rdx, r11);

    // Initialize the remaining (reserved) fields with one pointer filler map.
    __ LoadRoot(r11, Heap::kOnePointerFillerMapRootIndex);
    __ InitializeFieldsWithFiller(rdx, rdi, r11);

    // Check if we can finalize the instance size.
    Label finalize;
    STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
    __ testl(FieldOperand(rcx, Map::kBitField3Offset),
             Immediate(Map::ConstructionCounter::kMask));
    __ j(zero, &finalize, Label::kNear);
    __ Ret();

    // Finalize the instance size.
    __ bind(&finalize);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(rax);
      __ Push(rcx);
      __ CallRuntime(Runtime::kFinalizeInstanceSize);
      __ Pop(rax);
    }
    __ Ret();
  }

  // Fall back to %AllocateInNewSpace.
  __ bind(&allocate);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Integer32ToSmi(rbx, rbx);
    __ Push(rcx);
    __ Push(rbx);
    __ CallRuntime(Runtime::kAllocateInNewSpace);
    __ Pop(rcx);
  }
  // Recompute the untagged end address (rdi) from the runtime result in rax.
  __ movzxbl(rbx, FieldOperand(rcx, Map::kInstanceSizeOffset));
  __ leap(rdi, Operand(rax, rbx, times_pointer_size, 0));
  STATIC_ASSERT(kHeapObjectTag == 1);
  __ decp(rdi);  // Remove the tag from the end address.
  __ jmp(&done_allocate);

  // Fall back to %NewObject.
  __ bind(&new_object);
  __ PopReturnAddressTo(rcx);
  __ Push(rdi);
  __ Push(rdx);
  __ PushReturnAddressFrom(rcx);
  __ TailCallRuntime(Runtime::kNewObject);
}
4411
4412
4413void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
4414 // ----------- S t a t e -------------
4415 // -- rdi : function
4416 // -- rsi : context
4417 // -- rbp : frame pointer
4418 // -- rsp[0] : return address
4419 // -----------------------------------
4420 __ AssertFunction(rdi);
4421
Ben Murdochc5610432016-08-08 18:44:38 +01004422 // Make rdx point to the JavaScript frame.
4423 __ movp(rdx, rbp);
4424 if (skip_stub_frame()) {
4425 // For Ignition we need to skip the handler/stub frame to reach the
4426 // JavaScript frame for the function.
Ben Murdoch097c5b22016-05-18 11:27:45 +01004427 __ movp(rdx, Operand(rdx, StandardFrameConstants::kCallerFPOffset));
Ben Murdochc5610432016-08-08 18:44:38 +01004428 }
4429 if (FLAG_debug_code) {
4430 Label ok;
Ben Murdochda12d292016-06-02 14:46:10 +01004431 __ cmpp(rdi, Operand(rdx, StandardFrameConstants::kFunctionOffset));
Ben Murdochc5610432016-08-08 18:44:38 +01004432 __ j(equal, &ok);
4433 __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
4434 __ bind(&ok);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004435 }
4436
4437 // Check if we have rest parameters (only possible if we have an
4438 // arguments adaptor frame below the function frame).
4439 Label no_rest_parameters;
4440 __ movp(rbx, Operand(rdx, StandardFrameConstants::kCallerFPOffset));
Ben Murdochda12d292016-06-02 14:46:10 +01004441 __ Cmp(Operand(rbx, CommonFrameConstants::kContextOrFrameTypeOffset),
Ben Murdoch097c5b22016-05-18 11:27:45 +01004442 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
4443 __ j(not_equal, &no_rest_parameters, Label::kNear);
4444
4445 // Check if the arguments adaptor frame contains more arguments than
4446 // specified by the function's internal formal parameter count.
4447 Label rest_parameters;
4448 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
4449 __ LoadSharedFunctionInfoSpecialField(
4450 rcx, rcx, SharedFunctionInfo::kFormalParameterCountOffset);
4451 __ SmiToInteger32(
4452 rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
4453 __ subl(rax, rcx);
4454 __ j(greater, &rest_parameters);
4455
4456 // Return an empty rest parameter array.
4457 __ bind(&no_rest_parameters);
4458 {
4459 // ----------- S t a t e -------------
4460 // -- rsi : context
4461 // -- rsp[0] : return address
4462 // -----------------------------------
4463
4464 // Allocate an empty rest parameter array.
4465 Label allocate, done_allocate;
Ben Murdochc5610432016-08-08 18:44:38 +01004466 __ Allocate(JSArray::kSize, rax, rdx, rcx, &allocate, NO_ALLOCATION_FLAGS);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004467 __ bind(&done_allocate);
4468
4469 // Setup the rest parameter array in rax.
4470 __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, rcx);
4471 __ movp(FieldOperand(rax, JSArray::kMapOffset), rcx);
4472 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
4473 __ movp(FieldOperand(rax, JSArray::kPropertiesOffset), rcx);
4474 __ movp(FieldOperand(rax, JSArray::kElementsOffset), rcx);
4475 __ movp(FieldOperand(rax, JSArray::kLengthOffset), Immediate(0));
4476 STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
4477 __ Ret();
4478
4479 // Fall back to %AllocateInNewSpace.
4480 __ bind(&allocate);
4481 {
4482 FrameScope scope(masm, StackFrame::INTERNAL);
4483 __ Push(Smi::FromInt(JSArray::kSize));
4484 __ CallRuntime(Runtime::kAllocateInNewSpace);
4485 }
4486 __ jmp(&done_allocate);
4487 }
4488
4489 __ bind(&rest_parameters);
4490 {
4491 // Compute the pointer to the first rest parameter (skippping the receiver).
4492 __ leap(rbx, Operand(rbx, rax, times_pointer_size,
4493 StandardFrameConstants::kCallerSPOffset -
4494 1 * kPointerSize));
4495
4496 // ----------- S t a t e -------------
4497 // -- rsi : context
4498 // -- rax : number of rest parameters
4499 // -- rbx : pointer to first rest parameters
4500 // -- rsp[0] : return address
4501 // -----------------------------------
4502
4503 // Allocate space for the rest parameter array plus the backing store.
4504 Label allocate, done_allocate;
4505 __ leal(rcx, Operand(rax, times_pointer_size,
4506 JSArray::kSize + FixedArray::kHeaderSize));
Ben Murdochc5610432016-08-08 18:44:38 +01004507 __ Allocate(rcx, rdx, rdi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004508 __ bind(&done_allocate);
4509
4510 // Compute the arguments.length in rdi.
4511 __ Integer32ToSmi(rdi, rax);
4512
4513 // Setup the elements array in rdx.
4514 __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
4515 __ movp(FieldOperand(rdx, FixedArray::kMapOffset), rcx);
4516 __ movp(FieldOperand(rdx, FixedArray::kLengthOffset), rdi);
4517 {
4518 Label loop, done_loop;
4519 __ Set(rcx, 0);
4520 __ bind(&loop);
4521 __ cmpl(rcx, rax);
4522 __ j(equal, &done_loop, Label::kNear);
4523 __ movp(kScratchRegister, Operand(rbx, 0 * kPointerSize));
4524 __ movp(
4525 FieldOperand(rdx, rcx, times_pointer_size, FixedArray::kHeaderSize),
4526 kScratchRegister);
4527 __ subp(rbx, Immediate(1 * kPointerSize));
4528 __ addl(rcx, Immediate(1));
4529 __ jmp(&loop);
4530 __ bind(&done_loop);
4531 }
4532
4533 // Setup the rest parameter array in rax.
4534 __ leap(rax,
4535 Operand(rdx, rax, times_pointer_size, FixedArray::kHeaderSize));
4536 __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, rcx);
4537 __ movp(FieldOperand(rax, JSArray::kMapOffset), rcx);
4538 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
4539 __ movp(FieldOperand(rax, JSArray::kPropertiesOffset), rcx);
4540 __ movp(FieldOperand(rax, JSArray::kElementsOffset), rdx);
4541 __ movp(FieldOperand(rax, JSArray::kLengthOffset), rdi);
4542 STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
4543 __ Ret();
4544
4545 // Fall back to %AllocateInNewSpace.
4546 __ bind(&allocate);
4547 {
4548 FrameScope scope(masm, StackFrame::INTERNAL);
4549 __ Integer32ToSmi(rax, rax);
4550 __ Integer32ToSmi(rcx, rcx);
4551 __ Push(rax);
4552 __ Push(rbx);
4553 __ Push(rcx);
4554 __ CallRuntime(Runtime::kAllocateInNewSpace);
4555 __ movp(rdx, rax);
4556 __ Pop(rbx);
4557 __ Pop(rax);
4558 __ SmiToInteger32(rax, rax);
4559 }
4560 __ jmp(&done_allocate);
4561 }
4562}
4563
4564
// Builds a sloppy-mode `arguments` object for the current JavaScript
// function. When the function has formal parameters, the elements are a
// "parameter map" (SloppyArgumentsElements) that aliases mapped parameters
// into the context, followed by a FixedArray backing store for the
// remaining (unmapped) arguments. All three objects (arguments object,
// parameter map, backing store) are allocated in one go; on allocation
// failure we tail-call %NewSloppyArguments.
void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdi    : function
  //  -- rsi    : context
  //  -- rbp    : frame pointer
  //  -- rsp[0] : return address
  // -----------------------------------
  __ AssertFunction(rdi);

  // Make r9 point to the JavaScript frame.
  __ movp(r9, rbp);
  if (skip_stub_frame()) {
    // For Ignition we need to skip the handler/stub frame to reach the
    // JavaScript frame for the function.
    __ movp(r9, Operand(r9, StandardFrameConstants::kCallerFPOffset));
  }
  if (FLAG_debug_code) {
    // Sanity check: the frame we located must belong to rdi.
    // NOTE(review): the abort reason names the *rest* arguments stub; the
    // message is reused here — confirm this is intentional.
    Label ok;
    __ cmpp(rdi, Operand(r9, StandardFrameConstants::kFunctionOffset));
    __ j(equal, &ok);
    __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
    __ bind(&ok);
  }

  // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
  // Load the formal parameter count and compute the (unadapted) pointer to
  // the parameters on the caller's stack.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ LoadSharedFunctionInfoSpecialField(
      rcx, rcx, SharedFunctionInfo::kFormalParameterCountOffset);
  __ leap(rdx, Operand(r9, rcx, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));
  __ Integer32ToSmi(rcx, rcx);

  // rcx : number of parameters (tagged)
  // rdx : parameters pointer
  // rdi : function
  // rsp[0] : return address
  // r9  : JavaScript frame pointer.
  // Registers used over the whole function:
  //  rbx: the mapped parameter count (untagged)
  //  rax: the allocated object (tagged).
  Factory* factory = isolate()->factory();

  __ SmiToInteger64(rbx, rcx);
  // rbx = parameter count (untagged)

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ movp(rax, Operand(r9, StandardFrameConstants::kCallerFPOffset));
  __ movp(r8, Operand(rax, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ Cmp(r8, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor_frame);

  // No adaptor, parameter count = argument count.
  __ movp(r11, rbx);
  __ jmp(&try_allocate, Label::kNear);

  // We have an adaptor frame. Patch the parameters pointer to point into the
  // adaptor frame, using the actual (adapted) argument count.
  __ bind(&adaptor_frame);
  __ SmiToInteger64(
      r11, Operand(rax, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ leap(rdx, Operand(rax, r11, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));

  // rbx = parameter count (untagged)
  // r11 = argument count (untagged)
  // Compute the mapped parameter count = min(rbx, r11) in rbx.
  __ cmpp(rbx, r11);
  __ j(less_equal, &try_allocate, Label::kNear);
  __ movp(rbx, r11);

  __ bind(&try_allocate);

  // Compute the sizes of backing store, parameter map, and arguments object.
  // 1. Parameter map, has 2 extra words containing context and backing store.
  const int kParameterMapHeaderSize =
      FixedArray::kHeaderSize + 2 * kPointerSize;
  // r8 accumulates the total allocation size; zero map size if there are no
  // mapped parameters.
  Label no_parameter_map;
  __ xorp(r8, r8);
  __ testp(rbx, rbx);
  __ j(zero, &no_parameter_map, Label::kNear);
  __ leap(r8, Operand(rbx, times_pointer_size, kParameterMapHeaderSize));
  __ bind(&no_parameter_map);

  // 2. Backing store.
  __ leap(r8, Operand(r8, r11, times_pointer_size, FixedArray::kHeaderSize));

  // 3. Arguments object.
  __ addp(r8, Immediate(JSSloppyArgumentsObject::kSize));

  // Do the allocation of all three objects in one go.
  __ Allocate(r8, rax, r9, no_reg, &runtime, NO_ALLOCATION_FLAGS);

  // rax = address of new object(s) (tagged)
  // r11 = argument count (untagged)
  // Get the arguments map from the current native context into r9: the plain
  // sloppy map when there are no mapped parameters, otherwise the aliased
  // (fast) arguments map.
  Label has_mapped_parameters, instantiate;
  __ movp(r9, NativeContextOperand());
  __ testp(rbx, rbx);
  __ j(not_zero, &has_mapped_parameters, Label::kNear);

  const int kIndex = Context::SLOPPY_ARGUMENTS_MAP_INDEX;
  __ movp(r9, Operand(r9, Context::SlotOffset(kIndex)));
  __ jmp(&instantiate, Label::kNear);

  const int kAliasedIndex = Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX;
  __ bind(&has_mapped_parameters);
  __ movp(r9, Operand(r9, Context::SlotOffset(kAliasedIndex)));
  __ bind(&instantiate);

  // rax = address of new object (tagged)
  // rbx = mapped parameter count (untagged)
  // r11 = argument count (untagged)
  // r9 = address of arguments map (tagged)
  __ movp(FieldOperand(rax, JSObject::kMapOffset), r9);
  __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister);
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), kScratchRegister);

  // Set up the callee in-object property.
  __ AssertNotSmi(rdi);
  __ movp(FieldOperand(rax, JSSloppyArgumentsObject::kCalleeOffset), rdi);

  // Use the length (smi tagged) and set that as an in-object property too.
  // Note: r11 is tagged from here on.
  __ Integer32ToSmi(r11, r11);
  __ movp(FieldOperand(rax, JSSloppyArgumentsObject::kLengthOffset), r11);

  // Set up the elements pointer in the allocated arguments object.
  // If we allocated a parameter map, rdi will point there, otherwise to the
  // backing store.
  __ leap(rdi, Operand(rax, JSSloppyArgumentsObject::kSize));
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rdi);

  // rax = address of new object (tagged)
  // rbx = mapped parameter count (untagged)
  // r11 = argument count (tagged)
  // rdi = address of parameter map or backing store (tagged)

  // Initialize parameter map. If there are no mapped arguments, we're done.
  Label skip_parameter_map;
  __ testp(rbx, rbx);
  __ j(zero, &skip_parameter_map);

  __ LoadRoot(kScratchRegister, Heap::kSloppyArgumentsElementsMapRootIndex);
  // rbx contains the untagged argument count. Add 2 and tag to write.
  __ movp(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
  __ Integer64PlusConstantToSmi(r9, rbx, 2);
  __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), r9);
  // Slot 0 of the parameter map holds the context, slot 1 the backing store.
  __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 0 * kPointerSize), rsi);
  __ leap(r9, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
  __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 1 * kPointerSize), r9);

  // Copy the parameter slots and the holes in the arguments.
  // We need to fill in mapped_parameter_count slots. They index the context,
  // where parameters are stored in reverse order, at
  //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
  // The mapped parameter thus need to get indices
  //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
  //       MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
  // We loop from right to left.
  Label parameters_loop, parameters_test;

  // Load tagged parameter count into r9.
  __ Integer32ToSmi(r9, rbx);
  __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS));
  __ addp(r8, rcx);
  __ subp(r8, r9);
  __ movp(rcx, rdi);
  __ leap(rdi, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
  __ SmiToInteger64(r9, r9);
  // r9 = loop variable (untagged)
  // r8 = mapping index (tagged)
  // rcx = address of parameter map (tagged)
  // rdi = address of backing store (tagged)
  __ jmp(&parameters_test, Label::kNear);

  // For each mapped parameter: write the context slot index into the
  // parameter map and a hole into the corresponding backing-store slot.
  __ bind(&parameters_loop);
  __ subp(r9, Immediate(1));
  __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
  __ movp(FieldOperand(rcx, r9, times_pointer_size, kParameterMapHeaderSize),
          r8);
  __ movp(FieldOperand(rdi, r9, times_pointer_size, FixedArray::kHeaderSize),
          kScratchRegister);
  __ SmiAddConstant(r8, r8, Smi::FromInt(1));
  __ bind(&parameters_test);
  __ testp(r9, r9);
  __ j(not_zero, &parameters_loop, Label::kNear);

  __ bind(&skip_parameter_map);

  // r11 = argument count (tagged)
  // rdi = address of backing store (tagged)
  // Copy arguments header and remaining slots (if there are any).
  __ Move(FieldOperand(rdi, FixedArray::kMapOffset),
          factory->fixed_array_map());
  __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), r11);

  // Copy the unmapped arguments (indices [mapped_count, argument_count))
  // from the stack into the backing store.
  Label arguments_loop, arguments_test;
  __ movp(r8, rbx);
  // Untag r11 for the loop below.
  __ SmiToInteger64(r11, r11);
  __ leap(kScratchRegister, Operand(r8, times_pointer_size, 0));
  __ subp(rdx, kScratchRegister);
  __ jmp(&arguments_test, Label::kNear);

  __ bind(&arguments_loop);
  __ subp(rdx, Immediate(kPointerSize));
  __ movp(r9, Operand(rdx, 0));
  __ movp(FieldOperand(rdi, r8,
                       times_pointer_size,
                       FixedArray::kHeaderSize),
          r9);
  __ addp(r8, Immediate(1));

  __ bind(&arguments_test);
  __ cmpp(r8, r11);
  __ j(less, &arguments_loop, Label::kNear);

  // Return.
  __ ret(0);

  // Do the runtime call to allocate the arguments object.
  // r11 = argument count (untagged)
  __ bind(&runtime);
  __ Integer32ToSmi(r11, r11);
  __ PopReturnAddressTo(rax);
  __ Push(rdi);  // Push function.
  __ Push(rdx);  // Push parameters pointer.
  __ Push(r11);  // Push parameter count.
  __ PushReturnAddressFrom(rax);
  __ TailCallRuntime(Runtime::kNewSloppyArguments);
}
4797
4798
// Builds a strict-mode `arguments` object for the current JavaScript
// function: a JSStrictArgumentsObject plus a FixedArray backing store,
// allocated together in new space. Falls back to %AllocateInNewSpace (via
// an internal frame) when inline allocation fails.
void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdi    : function
  //  -- rsi    : context
  //  -- rbp    : frame pointer
  //  -- rsp[0] : return address
  // -----------------------------------
  __ AssertFunction(rdi);

  // Make rdx point to the JavaScript frame.
  __ movp(rdx, rbp);
  if (skip_stub_frame()) {
    // For Ignition we need to skip the handler/stub frame to reach the
    // JavaScript frame for the function.
    __ movp(rdx, Operand(rdx, StandardFrameConstants::kCallerFPOffset));
  }
  if (FLAG_debug_code) {
    // Sanity check: the frame we located must belong to rdi.
    // NOTE(review): the abort reason names the *rest* arguments stub; the
    // message is reused here — confirm this is intentional.
    Label ok;
    __ cmpp(rdi, Operand(rdx, StandardFrameConstants::kFunctionOffset));
    __ j(equal, &ok);
    __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
    __ bind(&ok);
  }

  // Check if we have an arguments adaptor frame below the function frame.
  // Either way, leave the argument count in rax and a pointer to the first
  // argument (skipping the receiver) in rbx.
  Label arguments_adaptor, arguments_done;
  __ movp(rbx, Operand(rdx, StandardFrameConstants::kCallerFPOffset));
  __ Cmp(Operand(rbx, CommonFrameConstants::kContextOrFrameTypeOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &arguments_adaptor, Label::kNear);
  {
    // No adaptor: the argument count is the formal parameter count.
    __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ LoadSharedFunctionInfoSpecialField(
        rax, rax, SharedFunctionInfo::kFormalParameterCountOffset);
    __ leap(rbx, Operand(rdx, rax, times_pointer_size,
                         StandardFrameConstants::kCallerSPOffset -
                             1 * kPointerSize));
  }
  __ jmp(&arguments_done, Label::kNear);
  __ bind(&arguments_adaptor);
  {
    // Adaptor frame: use the actual (adapted) argument count.
    __ SmiToInteger32(
        rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
    __ leap(rbx, Operand(rbx, rax, times_pointer_size,
                         StandardFrameConstants::kCallerSPOffset -
                             1 * kPointerSize));
  }
  __ bind(&arguments_done);

  // ----------- S t a t e -------------
  //  -- rax    : number of arguments
  //  -- rbx    : pointer to the first argument
  //  -- rsi    : context
  //  -- rsp[0] : return address
  // -----------------------------------

  // Allocate space for the strict arguments object plus the backing store.
  Label allocate, done_allocate;
  __ leal(rcx, Operand(rax, times_pointer_size, JSStrictArgumentsObject::kSize +
                                                    FixedArray::kHeaderSize));
  __ Allocate(rcx, rdx, rdi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
  __ bind(&done_allocate);

  // Compute the arguments.length in rdi.
  __ Integer32ToSmi(rdi, rax);

  // Setup the elements array in rdx.
  __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
  __ movp(FieldOperand(rdx, FixedArray::kMapOffset), rcx);
  __ movp(FieldOperand(rdx, FixedArray::kLengthOffset), rdi);
  {
    // Copy the arguments from the stack (walking downwards from rbx) into
    // the elements array.
    Label loop, done_loop;
    __ Set(rcx, 0);
    __ bind(&loop);
    __ cmpl(rcx, rax);
    __ j(equal, &done_loop, Label::kNear);
    __ movp(kScratchRegister, Operand(rbx, 0 * kPointerSize));
    __ movp(
        FieldOperand(rdx, rcx, times_pointer_size, FixedArray::kHeaderSize),
        kScratchRegister);
    __ subp(rbx, Immediate(1 * kPointerSize));
    __ addl(rcx, Immediate(1));
    __ jmp(&loop);
    __ bind(&done_loop);
  }

  // Setup the strict arguments object in rax (it lives directly after the
  // elements array in the joint allocation).
  __ leap(rax,
          Operand(rdx, rax, times_pointer_size, FixedArray::kHeaderSize));
  __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, rcx);
  __ movp(FieldOperand(rax, JSStrictArgumentsObject::kMapOffset), rcx);
  __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSStrictArgumentsObject::kPropertiesOffset), rcx);
  __ movp(FieldOperand(rax, JSStrictArgumentsObject::kElementsOffset), rdx);
  __ movp(FieldOperand(rax, JSStrictArgumentsObject::kLengthOffset), rdi);
  STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
  __ Ret();

  // Fall back to %AllocateInNewSpace.
  __ bind(&allocate);
  {
    // Preserve the argument count (rax), argument pointer (rbx) and the
    // requested size (rcx) across the runtime call.
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Integer32ToSmi(rax, rax);
    __ Integer32ToSmi(rcx, rcx);
    __ Push(rax);
    __ Push(rbx);
    __ Push(rcx);
    __ CallRuntime(Runtime::kAllocateInNewSpace);
    __ movp(rdx, rax);
    __ Pop(rbx);
    __ Pop(rax);
    __ SmiToInteger32(rax, rax);
  }
  __ jmp(&done_allocate);
}
4914
4915
// Loads a global variable stored in a PropertyCell of a script context.
// Walks up depth() context links to reach the script context, loads the
// cell's value into rax, and returns it unless it is the_hole (uninitialized
// binding), in which case it tail-calls the runtime.
void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
  Register context_reg = rsi;
  Register slot_reg = rbx;
  Register result_reg = rax;
  Label slow_case;

  // Go up context chain to the script context.
  for (int i = 0; i < depth(); ++i) {
    __ movp(rdi, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
    context_reg = rdi;
  }

  // Load the PropertyCell value at the specified slot.
  __ movp(result_reg, ContextOperand(context_reg, slot_reg));
  __ movp(result_reg, FieldOperand(result_reg, PropertyCell::kValueOffset));

  // Check that value is not the_hole.
  __ CompareRoot(result_reg, Heap::kTheHoleValueRootIndex);
  __ j(equal, &slow_case, Label::kNear);
  __ Ret();

  // Fallback to the runtime: pass the (re-tagged) slot index.
  __ bind(&slow_case);
  __ Integer32ToSmi(slot_reg, slot_reg);
  __ PopReturnAddressTo(kScratchRegister);
  __ Push(slot_reg);
  __ Push(kScratchRegister);
  __ TailCallRuntime(Runtime::kLoadGlobalViaContext);
}
4945
4946
// Stores a global variable into a PropertyCell of a script context, handling
// the fast cell states inline (mutable cells, same-value stores, and
// constant-type cells whose old and new values share a map) and deferring
// everything else — including READ_ONLY cells — to the runtime.
void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
  Register context_reg = rsi;
  Register slot_reg = rbx;
  Register value_reg = rax;
  Register cell_reg = r8;
  Register cell_details_reg = rdx;
  Register cell_value_reg = r9;
  Label fast_heapobject_case, fast_smi_case, slow_case;

  if (FLAG_debug_code) {
    // The hole is never stored through this stub (see comment below).
    __ CompareRoot(value_reg, Heap::kTheHoleValueRootIndex);
    __ Check(not_equal, kUnexpectedValue);
  }

  // Go up context chain to the script context.
  for (int i = 0; i < depth(); ++i) {
    __ movp(rdi, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
    context_reg = rdi;
  }

  // Load the PropertyCell at the specified slot.
  __ movp(cell_reg, ContextOperand(context_reg, slot_reg));

  // Load PropertyDetails for the cell (actually only the cell_type, kind and
  // READ_ONLY bit of attributes).
  __ SmiToInteger32(cell_details_reg,
                    FieldOperand(cell_reg, PropertyCell::kDetailsOffset));
  __ andl(cell_details_reg,
          Immediate(PropertyDetails::PropertyCellTypeField::kMask |
                    PropertyDetails::KindField::kMask |
                    PropertyDetails::kAttributesReadOnlyMask));

  // Check if PropertyCell holds mutable data.
  Label not_mutable_data;
  __ cmpl(cell_details_reg,
          Immediate(PropertyDetails::PropertyCellTypeField::encode(
                        PropertyCellType::kMutable) |
                    PropertyDetails::KindField::encode(kData)));
  __ j(not_equal, &not_mutable_data);
  __ JumpIfSmi(value_reg, &fast_smi_case);
  // Heap-object store: needs a write barrier.
  __ bind(&fast_heapobject_case);
  __ movp(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
  __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg,
                      cell_value_reg, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  // RecordWriteField clobbers the value register, so we need to reload.
  __ movp(value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
  __ Ret();
  __ bind(&not_mutable_data);

  // Check if PropertyCell value matches the new value (relevant for Constant,
  // ConstantType and Undefined cells).
  Label not_same_value;
  __ movp(cell_value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
  __ cmpp(cell_value_reg, value_reg);
  __ j(not_equal, &not_same_value,
       FLAG_debug_code ? Label::kFar : Label::kNear);
  // Make sure the PropertyCell is not marked READ_ONLY.
  __ testl(cell_details_reg,
           Immediate(PropertyDetails::kAttributesReadOnlyMask));
  __ j(not_zero, &slow_case);
  if (FLAG_debug_code) {
    Label done;
    // This can only be true for Constant, ConstantType and Undefined cells,
    // because we never store the_hole via this stub.
    __ cmpl(cell_details_reg,
            Immediate(PropertyDetails::PropertyCellTypeField::encode(
                          PropertyCellType::kConstant) |
                      PropertyDetails::KindField::encode(kData)));
    __ j(equal, &done);
    __ cmpl(cell_details_reg,
            Immediate(PropertyDetails::PropertyCellTypeField::encode(
                          PropertyCellType::kConstantType) |
                      PropertyDetails::KindField::encode(kData)));
    __ j(equal, &done);
    __ cmpl(cell_details_reg,
            Immediate(PropertyDetails::PropertyCellTypeField::encode(
                          PropertyCellType::kUndefined) |
                      PropertyDetails::KindField::encode(kData)));
    __ Check(equal, kUnexpectedValue);
    __ bind(&done);
  }
  // Storing the same value is a no-op.
  __ Ret();
  __ bind(&not_same_value);

  // Check if PropertyCell contains data with constant type (and is not
  // READ_ONLY).
  __ cmpl(cell_details_reg,
          Immediate(PropertyDetails::PropertyCellTypeField::encode(
                        PropertyCellType::kConstantType) |
                    PropertyDetails::KindField::encode(kData)));
  __ j(not_equal, &slow_case, Label::kNear);

  // Now either both old and new values must be SMIs or both must be heap
  // objects with same map.
  Label value_is_heap_object;
  __ JumpIfNotSmi(value_reg, &value_is_heap_object, Label::kNear);
  __ JumpIfNotSmi(cell_value_reg, &slow_case, Label::kNear);
  // Old and new values are SMIs, no need for a write barrier here.
  __ bind(&fast_smi_case);
  __ movp(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
  __ Ret();
  __ bind(&value_is_heap_object);
  __ JumpIfSmi(cell_value_reg, &slow_case, Label::kNear);
  Register cell_value_map_reg = cell_value_reg;
  __ movp(cell_value_map_reg,
          FieldOperand(cell_value_reg, HeapObject::kMapOffset));
  __ cmpp(cell_value_map_reg, FieldOperand(value_reg, HeapObject::kMapOffset));
  __ j(equal, &fast_heapobject_case);

  // Fallback to the runtime: pass the (re-tagged) slot index and the value.
  __ bind(&slow_case);
  __ Integer32ToSmi(slot_reg, slot_reg);
  __ PopReturnAddressTo(kScratchRegister);
  __ Push(slot_reg);
  __ Push(value_reg);
  __ Push(kScratchRegister);
  __ TailCallRuntime(is_strict(language_mode())
                         ? Runtime::kStoreGlobalViaContext_Strict
                         : Runtime::kStoreGlobalViaContext_Sloppy);
}
5068
5069
5070static int Offset(ExternalReference ref0, ExternalReference ref1) {
5071 int64_t offset = (ref0.address() - ref1.address());
5072 // Check that fits into int.
5073 DCHECK(static_cast<int>(offset) == offset);
5074 return static_cast<int>(offset);
5075}
5076
5077
// Prepares stack to put arguments (aligns and so on). WIN64 calling
// convention requires to put the pointer to the return value slot into
// rcx (rcx must be preserved until CallApiFunctionAndReturn). Saves
// context (rsi). Clobbers rax. Allocates arg_stack_space * kPointerSize
// inside the exit frame (not GCed) accessible via StackSpaceOperand.
static void PrepareCallApiFunction(MacroAssembler* masm, int arg_stack_space) {
  // Thin wrapper: all the work happens in MacroAssembler::EnterApiExitFrame.
  __ EnterApiExitFrame(arg_stack_space);
}
5086
5087
// Calls an API function. Allocates HandleScope, extracts returned value
// from handle and propagates exceptions. Clobbers r14, r15, rbx and
// caller-save registers. Restores context. On return removes
// stack_space * kPointerSize (GCed).
//
// If the profiler is active, the call is routed through the |thunk_ref|
// trampoline (with |function_address| passed in |thunk_last_arg|);
// otherwise |function_address| is called directly.
static void CallApiFunctionAndReturn(MacroAssembler* masm,
                                     Register function_address,
                                     ExternalReference thunk_ref,
                                     Register thunk_last_arg, int stack_space,
                                     Operand* stack_space_operand,
                                     Operand return_value_operand,
                                     Operand* context_restore_operand) {
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  Label write_back;  // NOTE(review): declared but never bound or jumped to.

  Isolate* isolate = masm->isolate();
  Factory* factory = isolate->factory();
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate);
  // Offsets of the limit/level fields relative to the "next" field, so all
  // three can be addressed off a single base register.
  const int kNextOffset = 0;
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(isolate), next_address);
  const int kLevelOffset = Offset(
      ExternalReference::handle_scope_level_address(isolate), next_address);
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate);

  DCHECK(rdx.is(function_address) || r8.is(function_address));
  // Allocate HandleScope in callee-save registers.
  Register prev_next_address_reg = r14;
  Register prev_limit_reg = rbx;
  Register base_reg = r15;
  __ Move(base_reg, next_address);
  __ movp(prev_next_address_reg, Operand(base_reg, kNextOffset));
  __ movp(prev_limit_reg, Operand(base_reg, kLimitOffset));
  __ addl(Operand(base_reg, kLevelOffset), Immediate(1));

  if (FLAG_log_timer_events) {
    // Log entry into external code.
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1);
    __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
    __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
                     1);
    __ PopSafepointRegisters();
  }

  // Select the call target: the profiler thunk when profiling is enabled,
  // the API function itself otherwise.
  Label profiler_disabled;
  Label end_profiler_check;
  __ Move(rax, ExternalReference::is_profiling_address(isolate));
  __ cmpb(Operand(rax, 0), Immediate(0));
  __ j(zero, &profiler_disabled);

  // Third parameter is the address of the actual getter function.
  __ Move(thunk_last_arg, function_address);
  __ Move(rax, thunk_ref);
  __ jmp(&end_profiler_check);

  __ bind(&profiler_disabled);
  // Profiling disabled: call the api function directly (no thunk).
  __ Move(rax, function_address);

  __ bind(&end_profiler_check);

  // Call the api function!
  __ call(rax);

  if (FLAG_log_timer_events) {
    // Log return from external code.
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1);
    __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
    __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
                     1);
    __ PopSafepointRegisters();
  }

  // Load the value from ReturnValue
  __ movp(rax, return_value_operand);
  __ bind(&prologue);

  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  __ subl(Operand(base_reg, kLevelOffset), Immediate(1));
  __ movp(Operand(base_reg, kNextOffset), prev_next_address_reg);
  __ cmpp(prev_limit_reg, Operand(base_reg, kLimitOffset));
  __ j(not_equal, &delete_allocated_handles);

  // Leave the API exit frame.
  __ bind(&leave_exit_frame);
  bool restore_context = context_restore_operand != NULL;
  if (restore_context) {
    __ movp(rsi, *context_restore_operand);
  }
  if (stack_space_operand != nullptr) {
    // Dynamic stack space: load the byte count to pop into rbx.
    __ movp(rbx, *stack_space_operand);
  }
  __ LeaveApiExitFrame(!restore_context);

  // Check if the function scheduled an exception.
  __ Move(rdi, scheduled_exception_address);
  __ Cmp(Operand(rdi, 0), factory->the_hole_value());
  __ j(not_equal, &promote_scheduled_exception);

#if DEBUG
  // Check if the function returned a valid JavaScript value.
  Label ok;
  Register return_value = rax;
  Register map = rcx;

  __ JumpIfSmi(return_value, &ok, Label::kNear);
  __ movp(map, FieldOperand(return_value, HeapObject::kMapOffset));

  __ CmpInstanceType(map, LAST_NAME_TYPE);
  __ j(below_equal, &ok, Label::kNear);

  __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
  __ j(above_equal, &ok, Label::kNear);

  __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, Heap::kUndefinedValueRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, Heap::kTrueValueRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, Heap::kFalseValueRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, Heap::kNullValueRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ Abort(kAPICallReturnedInvalidObject);

  __ bind(&ok);
#endif

  if (stack_space_operand != nullptr) {
    // Pop a dynamic amount of stack (rbx) while preserving the return
    // address in rcx.
    DCHECK_EQ(stack_space, 0);
    __ PopReturnAddressTo(rcx);
    __ addq(rsp, rbx);
    __ jmp(rcx);
  } else {
    __ ret(stack_space * kPointerSize);
  }

  // Re-throw by promoting a scheduled exception.
  __ bind(&promote_scheduled_exception);
  __ TailCallRuntime(Runtime::kPromoteScheduledException);

  // HandleScope limit has changed. Delete allocated extensions.
  __ bind(&delete_allocated_handles);
  __ movp(Operand(base_reg, kLimitOffset), prev_limit_reg);
  // Preserve the return value (rax) across the C call in prev_limit_reg.
  __ movp(prev_limit_reg, rax);
  __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
  __ LoadAddress(rax,
                 ExternalReference::delete_handle_scope_extensions(isolate));
  __ call(rax);
  __ movp(rax, prev_limit_reg);
  __ jmp(&leave_exit_frame);
}
5253
Ben Murdochda12d292016-06-02 14:46:10 +01005254void CallApiCallbackStub::Generate(MacroAssembler* masm) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005255 // ----------- S t a t e -------------
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005256 // -- rdi : callee
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005257 // -- rbx : call_data
5258 // -- rcx : holder
5259 // -- rdx : api_function_address
5260 // -- rsi : context
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005261 // -- rax : number of arguments if argc is a register
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005262 // -- rsp[0] : return address
5263 // -- rsp[8] : last argument
5264 // -- ...
5265 // -- rsp[argc * 8] : first argument
5266 // -- rsp[(argc + 1) * 8] : receiver
5267 // -----------------------------------
5268
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005269 Register callee = rdi;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005270 Register call_data = rbx;
5271 Register holder = rcx;
5272 Register api_function_address = rdx;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005273 Register context = rsi;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005274 Register return_address = r8;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005275
5276 typedef FunctionCallbackArguments FCA;
5277
5278 STATIC_ASSERT(FCA::kContextSaveIndex == 6);
5279 STATIC_ASSERT(FCA::kCalleeIndex == 5);
5280 STATIC_ASSERT(FCA::kDataIndex == 4);
5281 STATIC_ASSERT(FCA::kReturnValueOffset == 3);
5282 STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
5283 STATIC_ASSERT(FCA::kIsolateIndex == 1);
5284 STATIC_ASSERT(FCA::kHolderIndex == 0);
Ben Murdochc5610432016-08-08 18:44:38 +01005285 STATIC_ASSERT(FCA::kNewTargetIndex == 7);
5286 STATIC_ASSERT(FCA::kArgsLength == 8);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005287
5288 __ PopReturnAddressTo(return_address);
5289
Ben Murdochc5610432016-08-08 18:44:38 +01005290 // new target
5291 __ PushRoot(Heap::kUndefinedValueRootIndex);
5292
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005293 // context save
5294 __ Push(context);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005295
5296 // callee
5297 __ Push(callee);
5298
5299 // call data
5300 __ Push(call_data);
5301 Register scratch = call_data;
Ben Murdochda12d292016-06-02 14:46:10 +01005302 if (!this->call_data_undefined()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005303 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
5304 }
5305 // return value
5306 __ Push(scratch);
5307 // return value default
5308 __ Push(scratch);
5309 // isolate
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005310 __ Move(scratch, ExternalReference::isolate_address(masm->isolate()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005311 __ Push(scratch);
5312 // holder
5313 __ Push(holder);
5314
5315 __ movp(scratch, rsp);
5316 // Push return address back on stack.
5317 __ PushReturnAddressFrom(return_address);
5318
Ben Murdochda12d292016-06-02 14:46:10 +01005319 if (!this->is_lazy()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01005320 // load context from callee
5321 __ movp(context, FieldOperand(callee, JSFunction::kContextOffset));
5322 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005323
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005324 // Allocate the v8::Arguments structure in the arguments' space since
5325 // it's not controlled by GC.
Ben Murdochc5610432016-08-08 18:44:38 +01005326 const int kApiStackSpace = 3;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005327
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005328 PrepareCallApiFunction(masm, kApiStackSpace);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005329
5330 // FunctionCallbackInfo::implicit_args_.
Ben Murdochda12d292016-06-02 14:46:10 +01005331 int argc = this->argc();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005332 __ movp(StackSpaceOperand(0), scratch);
Ben Murdochda12d292016-06-02 14:46:10 +01005333 __ addp(scratch, Immediate((argc + FCA::kArgsLength - 1) * kPointerSize));
5334 // FunctionCallbackInfo::values_.
5335 __ movp(StackSpaceOperand(1), scratch);
5336 // FunctionCallbackInfo::length_.
5337 __ Set(StackSpaceOperand(2), argc);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005338
5339#if defined(__MINGW64__) || defined(_WIN64)
5340 Register arguments_arg = rcx;
5341 Register callback_arg = rdx;
5342#else
5343 Register arguments_arg = rdi;
5344 Register callback_arg = rsi;
5345#endif
5346
5347 // It's okay if api_function_address == callback_arg
5348 // but not arguments_arg
5349 DCHECK(!api_function_address.is(arguments_arg));
5350
5351 // v8::InvocationCallback's argument.
5352 __ leap(arguments_arg, StackSpaceOperand(0));
5353
5354 ExternalReference thunk_ref =
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005355 ExternalReference::invoke_function_callback(masm->isolate());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005356
5357 // Accessor for FunctionCallbackInfo and first js arg.
5358 StackArgumentsAccessor args_from_rbp(rbp, FCA::kArgsLength + 1,
5359 ARGUMENTS_DONT_CONTAIN_RECEIVER);
5360 Operand context_restore_operand = args_from_rbp.GetArgumentOperand(
5361 FCA::kArgsLength - FCA::kContextSaveIndex);
Ben Murdochc5610432016-08-08 18:44:38 +01005362 Operand length_operand = StackSpaceOperand(2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005363 Operand return_value_operand = args_from_rbp.GetArgumentOperand(
Ben Murdochda12d292016-06-02 14:46:10 +01005364 this->is_store() ? 0 : FCA::kArgsLength - FCA::kReturnValueOffset);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005365 int stack_space = 0;
Ben Murdochc5610432016-08-08 18:44:38 +01005366 Operand* stack_space_operand = &length_operand;
Ben Murdochda12d292016-06-02 14:46:10 +01005367 stack_space = argc + FCA::kArgsLength + 1;
5368 stack_space_operand = nullptr;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005369 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, callback_arg,
5370 stack_space, stack_space_operand,
5371 return_value_operand, &context_restore_operand);
5372}
5373
5374
// Generates the call stub for an API accessor getter
// (v8::AccessorNameGetterCallback). Builds the PropertyCallbackArguments
// array plus the property-name handle on the stack, materializes a
// v8::PropertyCallbackInfo in non-GCed exit-frame space, and calls the
// getter through CallApiFunctionAndReturn.
void CallApiGetterStub::Generate(MacroAssembler* masm) {
  // C ABI argument registers differ between Win64 and System V x64.
#if defined(__MINGW64__) || defined(_WIN64)
  Register getter_arg = r8;
  Register accessor_info_arg = rdx;
  Register name_arg = rcx;
#else
  Register getter_arg = rdx;
  Register accessor_info_arg = rsi;
  Register name_arg = rdi;
#endif
  // NOTE(review): on Win64 this aliases getter_arg (both r8) — the DCHECKs
  // below only require it be distinct from accessor_info_arg and name_arg.
  Register api_function_address = r8;
  Register receiver = ApiGetterDescriptor::ReceiverRegister();
  Register holder = ApiGetterDescriptor::HolderRegister();
  Register callback = ApiGetterDescriptor::CallbackRegister();
  Register scratch = rax;
  DCHECK(!AreAliased(receiver, holder, callback, scratch));

  // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
  // name below the exit frame to make GC aware of them.
  // The pushes below must match these slot indices exactly.
  STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);

  // Insert additional parameters into the stack frame above return address.
  __ PopReturnAddressTo(scratch);
  __ Push(receiver);
  __ Push(FieldOperand(callback, AccessorInfo::kDataOffset));
  __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
  __ Push(kScratchRegister);  // return value
  __ Push(kScratchRegister);  // return value default
  __ PushAddress(ExternalReference::isolate_address(isolate()));
  __ Push(holder);
  __ Push(Smi::FromInt(0));  // should_throw_on_error -> false
  __ Push(FieldOperand(callback, AccessorInfo::kNameOffset));
  __ PushReturnAddressFrom(scratch);

  // v8::PropertyCallbackInfo::args_ array and name handle.
  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;

  // Allocate v8::PropertyCallbackInfo in non-GCed stack space.
  const int kArgStackSpace = 1;

  // Load address of v8::PropertyAccessorInfo::args_ array.
  // (skips the return address and the name handle just pushed).
  __ leap(scratch, Operand(rsp, 2 * kPointerSize));

  PrepareCallApiFunction(masm, kArgStackSpace);
  // Create v8::PropertyCallbackInfo object on the stack and initialize
  // it's args_ field.
  Operand info_object = StackSpaceOperand(0);
  __ movp(info_object, scratch);

  // First callback argument: handle to the property name (one slot below
  // the args_ array).
  __ leap(name_arg, Operand(scratch, -kPointerSize));
  // The context register (rsi) has been saved in PrepareCallApiFunction and
  // could be used to pass arguments.
  __ leap(accessor_info_arg, info_object);

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback(isolate());

  // It's okay if api_function_address == getter_arg
  // but not accessor_info_arg or name_arg
  DCHECK(!api_function_address.is(accessor_info_arg));
  DCHECK(!api_function_address.is(name_arg));
  // Fetch the C getter address out of the AccessorInfo's js_getter Foreign.
  __ movp(scratch, FieldOperand(callback, AccessorInfo::kJsGetterOffset));
  __ movp(api_function_address,
          FieldOperand(scratch, Foreign::kForeignAddressOffset));

  // +3 is to skip prolog, return address and name handle.
  Operand return_value_operand(
      rbp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg,
                           kStackUnwindSpace, nullptr, return_value_operand,
                           NULL);
}
5454
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005455#undef __
5456
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005457} // namespace internal
5458} // namespace v8
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005459
5460#endif // V8_TARGET_ARCH_X64