blob: b89438f36572f889409230ba666569109a218b0f [file] [log] [blame]
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001// Copyright 2013 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005#if V8_TARGET_ARCH_X64
Kristian Monsen80d68ea2010-09-08 11:05:35 +01006
Ben Murdochb8a8cc12014-11-26 15:28:44 +00007#include "src/code-stubs.h"
Ben Murdochda12d292016-06-02 14:46:10 +01008#include "src/api-arguments.h"
9#include "src/bootstrapper.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000010#include "src/codegen.h"
11#include "src/ic/handler-compiler.h"
12#include "src/ic/ic.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000013#include "src/ic/stub-cache.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000014#include "src/isolate.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000015#include "src/regexp/jsregexp.h"
16#include "src/regexp/regexp-macro-assembler.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -040017#include "src/runtime/runtime.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000018#include "src/x64/code-stubs-x64.h"
Kristian Monsen80d68ea2010-09-08 11:05:35 +010019
20namespace v8 {
21namespace internal {
22
Ben Murdoch61f157c2016-09-16 13:49:30 +010023#define __ ACCESS_MASM(masm)
Ben Murdochb8a8cc12014-11-26 15:28:44 +000024
// Trampoline into Runtime::kNewArray for the N-arguments Array constructor
// case.  On entry rax holds the JS argument count (it is bumped by 3 below to
// cover the three extra values pushed here).  rdi/rbx are register parameters
// of the stub's call descriptor — presumably the constructor function and the
// allocation site (NOTE(review): confirm against
// ArrayNArgumentsConstructorDescriptor).
void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
  __ popq(rcx);  // Pop the return address so we can push below it.
  // Store rdi into the stack slot just above the last JS argument
  // (rsp + rax * 8), i.e. the receiver slot of the runtime call.
  __ movq(MemOperand(rsp, rax, times_8, 0), rdi);
  __ pushq(rdi);  // Extra argument 1.
  __ pushq(rbx);  // Extra argument 2.
  __ pushq(rcx);  // Re-push the return address on top.
  // Account for the three additional arguments pushed above.
  __ addq(rax, Immediate(3));
  __ TailCallRuntime(Runtime::kNewArray);
}
34
Ben Murdochda12d292016-06-02 14:46:10 +010035void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
36 Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
37 descriptor->Initialize(rax, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
38}
Ben Murdochb8a8cc12014-11-26 15:28:44 +000039
Ben Murdoch61f157c2016-09-16 13:49:30 +010040void FastFunctionBindStub::InitializeDescriptor(
Ben Murdochb8a8cc12014-11-26 15:28:44 +000041 CodeStubDescriptor* descriptor) {
Ben Murdoch61f157c2016-09-16 13:49:30 +010042 Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry;
43 descriptor->Initialize(rax, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
Ben Murdochb8a8cc12014-11-26 15:28:44 +000044}
45
// Emits the "lightweight miss" path of a Hydrogen code stub: pushes the
// stub's register parameters (per its call descriptor) inside a fresh
// internal frame and calls the given |miss| external reference, then returns.
void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
                                               ExternalReference miss) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
  int param_count = descriptor.GetRegisterParameterCount();
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    // The last register parameter must be rax, since the miss handler's
    // result is expected there.
    DCHECK(param_count == 0 ||
           rax.is(descriptor.GetRegisterParameter(param_count - 1)));
    // Push arguments in descriptor order.
    for (int i = 0; i < param_count; ++i) {
      __ Push(descriptor.GetRegisterParameter(i));
    }
    __ CallExternalReference(miss, param_count);
  }
  // Frame is torn down by FrameScope's destructor before the return.
  __ Ret();
}
67
68
// Calls the C++ store-buffer-overflow handler.  All caller-saved registers
// (optionally including XMM registers, per save_doubles()) are preserved
// around the C call, so this stub can be invoked from arbitrary generated
// code without clobbering live values.
void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  __ PushCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs);
  const int argument_count = 1;
  __ PrepareCallCFunction(argument_count);
  // Single argument: the isolate address.
  __ LoadAddress(arg_reg_1,
                 ExternalReference::isolate_address(isolate()));

  // The C function cannot trigger GC, so no safepoint bookkeeping is needed.
  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  __ PopCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs);
  __ ret(0);
}
83
84
// Static helpers for loading tagged JS values into SSE2 registers.
class FloatingPointHelper : public AllStatic {
 public:
  // NOTE(review): this enum appears unused within this translation unit's
  // visible portion; confirm before removing.
  enum ConvertUndefined {
    CONVERT_UNDEFINED_TO_ZERO,
    BAILOUT_ON_UNDEFINED
  };
  // Load the operands from rdx and rax into xmm0 and xmm1, as doubles.
  // If the operands are not both numbers (smis or heap numbers), jump to
  // not_numbers.  Leaves rdx and rax unchanged; clobbers rcx and
  // kScratchRegister.
  static void LoadSSE2UnknownOperands(MacroAssembler* masm,
                                      Label* not_numbers);
};
98
99
// Truncates the IEEE-754 double stored at [source() + offset()] to a 32-bit
// integer (ECMA ToInt32 truncation semantics) and leaves the result in
// destination().  Handles exponents too large for cvttsd2si's 32-bit form by
// shifting the mantissa manually and applying the sign at the end.
void DoubleToIStub::Generate(MacroAssembler* masm) {
  Register input_reg = this->source();
  Register final_result_reg = this->destination();
  DCHECK(is_truncating());

  Label check_negative, process_64_bits, done;

  int double_offset = offset();

  // Account for return address and saved regs if input is rsp.
  if (input_reg.is(rsp)) double_offset += 3 * kRegisterSize;

  // The double is split into its low (mantissa) and high (sign/exponent/
  // mantissa-top) 32-bit words.
  MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
  MemOperand exponent_operand(MemOperand(input_reg,
                                         double_offset + kDoubleSize / 2));

  // Pick a scratch register that aliases neither the input nor the result.
  Register scratch1;
  Register scratch_candidates[3] = { rbx, rdx, rdi };
  for (int i = 0; i < 3; i++) {
    scratch1 = scratch_candidates[i];
    if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
  }

  // Since we must use rcx for shifts below, use some other register (rax)
  // to calculate the result if ecx is the requested return register.
  Register result_reg = final_result_reg.is(rcx) ? rax : final_result_reg;
  // Save ecx if it isn't the return register and therefore volatile, or if it
  // is the return register, then save the temp register we use in its stead
  // for the result.
  Register save_reg = final_result_reg.is(rcx) ? rax : rcx;
  __ pushq(scratch1);
  __ pushq(save_reg);

  // If the input is not rsp-relative, the exponent word must be stashed on
  // the stack because the pushes above may alias/obscure nothing, but the
  // shift path clobbers rcx which holds it.
  bool stash_exponent_copy = !input_reg.is(rsp);
  __ movl(scratch1, mantissa_operand);
  // Load the full 64-bit double (low qword starts at the mantissa address)
  // for the cvttsd2siq path.
  __ Movsd(kScratchDoubleReg, mantissa_operand);
  __ movl(rcx, exponent_operand);
  if (stash_exponent_copy) __ pushq(rcx);

  // Extract the unbiased exponent into result_reg.
  __ andl(rcx, Immediate(HeapNumber::kExponentMask));
  __ shrl(rcx, Immediate(HeapNumber::kExponentShift));
  __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias));
  __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits));
  // Small exponents fit in cvttsd2siq's exact range; take the 64-bit path.
  __ j(below, &process_64_bits);

  // Result is entirely in lower 32-bits of mantissa
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  __ subl(rcx, Immediate(delta));
  __ xorl(result_reg, result_reg);
  __ cmpl(rcx, Immediate(31));
  // Shift distance > 31 means all significant bits fall off: result is 0.
  __ j(above, &done);
  __ shll_cl(scratch1);  // Shift mantissa into position (count in cl).
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  // Exact truncating conversion of the full double.
  __ Cvttsd2siq(result_reg, kScratchDoubleReg);
  __ jmp(&done, Label::kNear);

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ movl(result_reg, scratch1);
  __ negl(result_reg);
  // Sign bit lives in the (possibly stashed) exponent word.
  if (stash_exponent_copy) {
    __ cmpl(MemOperand(rsp, 0), Immediate(0));
  } else {
    __ cmpl(exponent_operand, Immediate(0));
  }
  // Keep the un-negated value when the double was positive.
  __ cmovl(greater, result_reg, scratch1);

  // Restore registers
  __ bind(&done);
  if (stash_exponent_copy) {
    __ addp(rsp, Immediate(kDoubleSize));
  }
  if (!final_result_reg.is(result_reg)) {
    DCHECK(final_result_reg.is(rcx));
    __ movl(final_result_reg, result_reg);
  }
  __ popq(save_reg);
  __ popq(scratch1);
  __ ret(0);
}
182
183
184void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm,
185 Label* not_numbers) {
186 Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done;
187 // Load operand in rdx into xmm0, or branch to not_numbers.
188 __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex);
189 __ JumpIfSmi(rdx, &load_smi_rdx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000190 __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rcx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100191 __ j(not_equal, not_numbers); // Argument in rdx is not a number.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000192 __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100193 // Load operand in rax into xmm1, or branch to not_numbers.
194 __ JumpIfSmi(rax, &load_smi_rax);
195
196 __ bind(&load_nonsmi_rax);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000197 __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rcx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100198 __ j(not_equal, not_numbers);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000199 __ Movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100200 __ jmp(&done);
201
202 __ bind(&load_smi_rdx);
203 __ SmiToInteger32(kScratchRegister, rdx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000204 __ Cvtlsi2sd(xmm0, kScratchRegister);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100205 __ JumpIfNotSmi(rax, &load_nonsmi_rax);
206
207 __ bind(&load_smi_rax);
208 __ SmiToInteger32(kScratchRegister, rax);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000209 __ Cvtlsi2sd(xmm1, kScratchRegister);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100210 __ bind(&done);
211}
212
213
// Computes Math.pow(base, exponent).  Three entry flavors, selected by
// exponent_type():
//   ON_STACK - tagged base and exponent are on the stack (non-optimized
//              caller); result is returned as a heap number in rax.
//   TAGGED   - tagged exponent in rdx; untagged double base in xmm2.
//   INTEGER  - untagged int32 exponent in rdx; double base in xmm2.
// Integer exponents use square-and-multiply; fractional exponents of
// +/-0.5 use sqrtsd (with the ECMA -Infinity special cases); other doubles
// use x87 fyl2x/f2xm1/fscale, bailing out to the runtime on FPU exceptions.
void MathPowStub::Generate(MacroAssembler* masm) {
  const Register exponent = MathPowTaggedDescriptor::exponent();
  DCHECK(exponent.is(rdx));
  const Register base = rax;
  const Register scratch = rcx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ movp(scratch, Immediate(1));
  __ Cvtlsi2sd(double_result, scratch);

  if (exponent_type() == ON_STACK) {
    Label base_is_smi, unpack_exponent;
    // The exponent and base are supplied as arguments on the stack.
    // This can only happen if the stub is called from non-optimized code.
    // Load input parameters from stack.
    StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movp(base, args.GetArgumentOperand(0));
    __ movp(exponent, args.GetArgumentOperand(1));
    __ JumpIfSmi(base, &base_is_smi, Label::kNear);
    __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    // Base is neither smi nor heap number: defer to the runtime.
    __ j(not_equal, &call_runtime);

    __ Movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset));
    __ jmp(&unpack_exponent, Label::kNear);

    __ bind(&base_is_smi);
    __ SmiToInteger32(base, base);
    __ Cvtlsi2sd(double_base, base);
    __ bind(&unpack_exponent);

    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiToInteger32(exponent, exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ CompareRoot(FieldOperand(exponent, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &call_runtime);
    __ Movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
  } else if (exponent_type() == TAGGED) {
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiToInteger32(exponent, exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ Movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type() != INTEGER) {
    Label fast_power, try_arithmetic_simplification;
    // Detect integer exponents stored as double.
    __ DoubleToI(exponent, double_exponent, double_scratch,
                 TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification,
                 &try_arithmetic_simplification,
                 &try_arithmetic_simplification);
    __ jmp(&int_exponent);

    __ bind(&try_arithmetic_simplification);
    __ Cvttsd2si(exponent, double_exponent);
    // Skip to runtime if possibly NaN (indicated by the indefinite integer,
    // 0x80000000, which overflows when compared with 1).
    __ cmpl(exponent, Immediate(0x1));
    __ j(overflow, &call_runtime);

    if (exponent_type() == ON_STACK) {
      // Detect square root case. Crankshaft detects constant +/-0.5 at
      // compile time and uses DoMathPowHalf instead. We then skip this check
      // for non-constant cases of +/-0.5 as these hardly occur.
      Label continue_sqrt, continue_rsqrt, not_plus_half;
      // Test for 0.5.
      // Load double_scratch with 0.5 (IEEE-754 bit pattern).
      __ movq(scratch, V8_UINT64_C(0x3FE0000000000000));
      __ Movq(double_scratch, scratch);
      // Already ruled out NaNs for exponent.
      __ Ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &not_plus_half, Label::kNear);

      // Calculates square root of base. Check for the special case of
      // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
      // According to IEEE-754, double-precision -Infinity has the highest
      // 12 bits set and the lowest 52 bits cleared.
      __ movq(scratch, V8_UINT64_C(0xFFF0000000000000));
      __ Movq(double_scratch, scratch);
      __ Ucomisd(double_scratch, double_base);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal. However, it also sets the carry flag.
      __ j(not_equal, &continue_sqrt, Label::kNear);
      __ j(carry, &continue_sqrt, Label::kNear);

      // Set result to Infinity in the special case (0 - (-Infinity)).
      __ Xorpd(double_result, double_result);
      __ Subsd(double_result, double_scratch);
      __ jmp(&done);

      __ bind(&continue_sqrt);
      // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
      __ Xorpd(double_scratch, double_scratch);
      __ Addsd(double_scratch, double_base);  // Convert -0 to 0.
      __ Sqrtsd(double_result, double_scratch);
      __ jmp(&done);

      // Test for -0.5.
      __ bind(&not_plus_half);
      // Load double_scratch with -0.5 by subtracting 1 (double_scratch
      // currently holds 0.5 and double_result holds 1).
      __ Subsd(double_scratch, double_result);
      // Already ruled out NaNs for exponent.
      __ Ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &fast_power, Label::kNear);

      // Calculates reciprocal of square root of base. Check for the special
      // case of Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
      // According to IEEE-754, double-precision -Infinity has the highest
      // 12 bits set and the lowest 52 bits cleared.
      __ movq(scratch, V8_UINT64_C(0xFFF0000000000000));
      __ Movq(double_scratch, scratch);
      __ Ucomisd(double_scratch, double_base);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal. However, it also sets the carry flag.
      __ j(not_equal, &continue_rsqrt, Label::kNear);
      __ j(carry, &continue_rsqrt, Label::kNear);

      // Set result to 0 in the special case.
      __ Xorpd(double_result, double_result);
      __ jmp(&done);

      __ bind(&continue_rsqrt);
      // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
      __ Xorpd(double_exponent, double_exponent);
      __ Addsd(double_exponent, double_base);  // Convert -0 to +0.
      __ Sqrtsd(double_exponent, double_exponent);
      __ Divsd(double_result, double_exponent);  // 1 / sqrt(base).
      __ jmp(&done);
    }

    // Using FPU instructions to calculate power.
    Label fast_power_failed;
    __ bind(&fast_power);
    __ fnclex();  // Clear flags to catch exceptions later.
    // Transfer (B)ase and (E)xponent onto the FPU register stack.
    __ subp(rsp, Immediate(kDoubleSize));
    __ Movsd(Operand(rsp, 0), double_exponent);
    __ fld_d(Operand(rsp, 0));  // E
    __ Movsd(Operand(rsp, 0), double_base);
    __ fld_d(Operand(rsp, 0));  // B, E

    // Exponent is in st(1) and base is in st(0)
    // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
    // FYL2X calculates st(1) * log2(st(0))
    __ fyl2x();    // X
    __ fld(0);     // X, X
    __ frndint();  // rnd(X), X
    __ fsub(1);    // rnd(X), X-rnd(X)
    __ fxch(1);    // X - rnd(X), rnd(X)
    // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
    __ f2xm1();   // 2^(X-rnd(X)) - 1, rnd(X)
    __ fld1();    // 1, 2^(X-rnd(X)) - 1, rnd(X)
    __ faddp(1);  // 2^(X-rnd(X)), rnd(X)
    // FSCALE calculates st(0) * 2^st(1)
    __ fscale();  // 2^X, rnd(X)
    __ fstp(1);   // Drop rnd(X), leaving the result 2^X.
    // Bail out to runtime in case of exceptions in the status word.
    __ fnstsw_ax();
    __ testb(rax, Immediate(0x5F));  // Check for all but precision exception.
    __ j(not_zero, &fast_power_failed, Label::kNear);
    __ fstp_d(Operand(rsp, 0));
    __ Movsd(double_result, Operand(rsp, 0));
    __ addp(rsp, Immediate(kDoubleSize));
    __ jmp(&done);

    __ bind(&fast_power_failed);
    __ fninit();  // Reset the FPU state before going to the runtime.
    __ addp(rsp, Immediate(kDoubleSize));
    __ jmp(&call_runtime);
  }

  // Calculate power with integer exponent, using square-and-multiply.
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  // Back up exponent as we need to check if exponent is negative later.
  __ movp(scratch, exponent);  // Back up exponent.
  __ Movsd(double_scratch, double_base);  // Back up base.
  __ Movsd(double_scratch2, double_result);  // Load double_exponent with 1.

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ testl(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ negl(scratch);
  __ bind(&no_neg);

  __ j(zero, &while_false, Label::kNear);
  __ shrl(scratch, Immediate(1));
  // Above condition means CF==0 && ZF==0. This means that the
  // bit that has been shifted out is 0 and the result is not 0.
  __ j(above, &while_true, Label::kNear);
  __ Movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ shrl(scratch, Immediate(1));
  __ Mulsd(double_scratch, double_scratch);  // Square the running base.
  __ j(above, &while_true, Label::kNear);    // Shifted-out bit was 0: skip multiply.
  __ Mulsd(double_result, double_scratch);   // Fold current square into result.
  __ j(not_zero, &while_true);

  __ bind(&while_false);
  // If the exponent is negative, return 1/result.
  __ testl(exponent, exponent);
  __ j(greater, &done);
  __ Divsd(double_scratch2, double_result);  // double_scratch2 holds 1 here.
  __ Movsd(double_result, double_scratch2);
  // Test whether result is zero. Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ Xorpd(double_scratch2, double_scratch2);
  __ Ucomisd(double_scratch2, double_result);
  // double_exponent aliased as double_scratch2 has already been overwritten
  // and may not have contained the exponent value in the first place when the
  // input was a smi. We reset it with exponent value before bailing out.
  __ j(not_equal, &done);
  __ Cvtlsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  if (exponent_type() == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMathPowRT);

    // The stub is called from non-optimized code, which expects the result
    // as heap number in rax.
    __ bind(&done);
    __ AllocateHeapNumber(rax, rcx, &call_runtime);
    __ Movsd(FieldOperand(rax, HeapNumber::kValueOffset), double_result);
    __ ret(2 * kPointerSize);  // Pop base and exponent.
  } else {
    __ bind(&call_runtime);
    // Move base to the correct argument register. Exponent is already in xmm1.
    __ Movsd(xmm0, double_base);
    DCHECK(double_exponent.is(xmm1));
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(2);
      __ CallCFunction(
          ExternalReference::power_double_double_function(isolate()), 2);
    }
    // Return value is in xmm0.
    __ Movsd(double_result, xmm0);

    __ bind(&done);
    __ ret(0);
  }
}
471
472
// Loads the "prototype" property of a function receiver via the fast path,
// falling back to the generic LOAD_IC miss builtin on failure.
void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  Label miss;
  Register receiver = LoadDescriptor::ReceiverRegister();
  // Ensure that the vector and slot registers won't be clobbered before
  // calling the miss handler (r8/r9 are used as scratch below).
  DCHECK(!AreAliased(r8, r9, LoadWithVectorDescriptor::VectorRegister(),
                     LoadDescriptor::SlotRegister()));

  NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, r8,
                                                          r9, &miss);
  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
}
487
488
// Keyed load specialized for indexing into a string: returns the one-char
// string at the given index, or tail-calls the KEYED_LOAD_IC miss builtin
// when the receiver is not a string, the index is not a number, or the index
// is out of range.
void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
  // Return address is on the stack.
  Label miss;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register index = LoadDescriptor::NameRegister();
  Register scratch = rdi;
  Register result = rax;
  DCHECK(!scratch.is(receiver) && !scratch.is(index));
  // rax doubles as the IC slot register; it must not alias the vector.
  DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()) &&
         result.is(LoadDescriptor::SlotRegister()));

  // StringCharAtGenerator doesn't use the result register until it's passed
  // the different miss possibilities. If it did, we would have a conflict
  // when FLAG_vector_ics is true.
  StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          RECEIVER_IS_STRING);
  char_at_generator.GenerateFast(masm);
  __ ret(0);

  // Slow path: embedded runtime calls within the IC handler.
  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper);

  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}
519
520
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100521void RegExpExecStub::Generate(MacroAssembler* masm) {
522 // Just jump directly to runtime if native RegExp is not selected at compile
523 // time or if regexp entry in generated code is turned off runtime switch or
524 // at compilation.
525#ifdef V8_INTERPRETED_REGEXP
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000526 __ TailCallRuntime(Runtime::kRegExpExec);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100527#else // V8_INTERPRETED_REGEXP
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100528
529 // Stack frame on entry.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000530 // rsp[0] : return address
531 // rsp[8] : last_match_info (expected JSArray)
532 // rsp[16] : previous index
533 // rsp[24] : subject string
534 // rsp[32] : JSRegExp object
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100535
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000536 enum RegExpExecStubArgumentIndices {
537 JS_REG_EXP_OBJECT_ARGUMENT_INDEX,
538 SUBJECT_STRING_ARGUMENT_INDEX,
539 PREVIOUS_INDEX_ARGUMENT_INDEX,
540 LAST_MATCH_INFO_ARGUMENT_INDEX,
541 REG_EXP_EXEC_ARGUMENT_COUNT
542 };
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100543
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000544 StackArgumentsAccessor args(rsp, REG_EXP_EXEC_ARGUMENT_COUNT,
545 ARGUMENTS_DONT_CONTAIN_RECEIVER);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100546 Label runtime;
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100547 // Ensure that a RegExp stack is allocated.
548 ExternalReference address_of_regexp_stack_memory_address =
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000549 ExternalReference::address_of_regexp_stack_memory_address(isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100550 ExternalReference address_of_regexp_stack_memory_size =
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000551 ExternalReference::address_of_regexp_stack_memory_size(isolate());
Steve Block44f0eee2011-05-26 01:26:41 +0100552 __ Load(kScratchRegister, address_of_regexp_stack_memory_size);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000553 __ testp(kScratchRegister, kScratchRegister);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100554 __ j(zero, &runtime);
555
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100556 // Check that the first argument is a JSRegExp object.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000557 __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100558 __ JumpIfSmi(rax, &runtime);
559 __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister);
560 __ j(not_equal, &runtime);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000561
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100562 // Check that the RegExp has been compiled (data contains a fixed array).
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000563 __ movp(rax, FieldOperand(rax, JSRegExp::kDataOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100564 if (FLAG_debug_code) {
Steve Block44f0eee2011-05-26 01:26:41 +0100565 Condition is_smi = masm->CheckSmi(rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100566 __ Check(NegateCondition(is_smi),
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000567 kUnexpectedTypeForRegExpDataFixedArrayExpected);
Steve Block44f0eee2011-05-26 01:26:41 +0100568 __ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000569 __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100570 }
571
Steve Block44f0eee2011-05-26 01:26:41 +0100572 // rax: RegExp data (FixedArray)
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100573 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
Steve Block44f0eee2011-05-26 01:26:41 +0100574 __ SmiToInteger32(rbx, FieldOperand(rax, JSRegExp::kDataTagOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100575 __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP));
576 __ j(not_equal, &runtime);
577
Steve Block44f0eee2011-05-26 01:26:41 +0100578 // rax: RegExp data (FixedArray)
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100579 // Check that the number of captures fit in the static offsets vector buffer.
580 __ SmiToInteger32(rdx,
Steve Block44f0eee2011-05-26 01:26:41 +0100581 FieldOperand(rax, JSRegExp::kIrregexpCaptureCountOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000582 // Check (number_of_captures + 1) * 2 <= offsets vector size
583 // Or number_of_captures <= offsets vector size / 2 - 1
584 STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
585 __ cmpl(rdx, Immediate(Isolate::kJSRegexpStaticOffsetsVectorSize / 2 - 1));
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100586 __ j(above, &runtime);
587
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000588 // Reset offset for possibly sliced string.
589 __ Set(r14, 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000590 __ movp(rdi, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
591 __ JumpIfSmi(rdi, &runtime);
592 __ movp(r15, rdi); // Make a copy of the original subject string.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000593 // rax: RegExp data (FixedArray)
594 // rdi: subject string
595 // r15: subject string
596 // Handle subject string according to its encoding and representation:
597 // (1) Sequential two byte? If yes, go to (9).
Ben Murdoch097c5b22016-05-18 11:27:45 +0100598 // (2) Sequential one byte? If yes, go to (5).
599 // (3) Sequential or cons? If not, go to (6).
600 // (4) Cons string. If the string is flat, replace subject with first string
601 // and go to (1). Otherwise bail out to runtime.
602 // (5) One byte sequential. Load regexp code for one byte.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000603 // (E) Carry on.
604 /// [...]
605
606 // Deferred code at the end of the stub:
Ben Murdoch097c5b22016-05-18 11:27:45 +0100607 // (6) Long external string? If not, go to (10).
608 // (7) External string. Make it, offset-wise, look like a sequential string.
609 // (8) Is the external string one byte? If yes, go to (5).
610 // (9) Two byte sequential. Load regexp code for two byte. Go to (E).
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000611 // (10) Short external string or not a string? If yes, bail out to runtime.
Ben Murdoch097c5b22016-05-18 11:27:45 +0100612 // (11) Sliced string. Replace subject with parent. Go to (1).
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000613
Ben Murdoch097c5b22016-05-18 11:27:45 +0100614 Label seq_one_byte_string /* 5 */, seq_two_byte_string /* 9 */,
615 external_string /* 7 */, check_underlying /* 1 */,
616 not_seq_nor_cons /* 6 */, check_code /* E */, not_long_external /* 10 */;
617
618 __ bind(&check_underlying);
619 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
620 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000621
622 // (1) Sequential two byte? If yes, go to (9).
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100623 __ andb(rbx, Immediate(kIsNotStringMask |
624 kStringRepresentationMask |
625 kStringEncodingMask |
626 kShortExternalStringMask));
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100627 STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000628 __ j(zero, &seq_two_byte_string); // Go to (9).
629
Ben Murdoch097c5b22016-05-18 11:27:45 +0100630 // (2) Sequential one byte? If yes, go to (5).
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000631 // Any other sequential string must be one byte.
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100632 __ andb(rbx, Immediate(kIsNotStringMask |
633 kStringRepresentationMask |
634 kShortExternalStringMask));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100635 __ j(zero, &seq_one_byte_string, Label::kNear); // Go to (5).
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100636
Ben Murdoch097c5b22016-05-18 11:27:45 +0100637 // (3) Sequential or cons? If not, go to (6).
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000638 // We check whether the subject string is a cons, since sequential strings
639 // have already been covered.
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000640 STATIC_ASSERT(kConsStringTag < kExternalStringTag);
641 STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100642 STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
643 STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000644 __ cmpp(rbx, Immediate(kExternalStringTag));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100645 __ j(greater_equal, &not_seq_nor_cons); // Go to (6).
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100646
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000647 // (4) Cons string. Check that it's flat.
648 // Replace subject with first string and reload instance type.
Steve Block44f0eee2011-05-26 01:26:41 +0100649 __ CompareRoot(FieldOperand(rdi, ConsString::kSecondOffset),
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000650 Heap::kempty_stringRootIndex);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100651 __ j(not_equal, &runtime);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000652 __ movp(rdi, FieldOperand(rdi, ConsString::kFirstOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100653 __ jmp(&check_underlying);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000654
Ben Murdoch097c5b22016-05-18 11:27:45 +0100655 // (5) One byte sequential. Load regexp code for one byte.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000656 __ bind(&seq_one_byte_string);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100657 // rax: RegExp data (FixedArray)
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000658 __ movp(r11, FieldOperand(rax, JSRegExp::kDataOneByteCodeOffset));
659 __ Set(rcx, 1); // Type is one byte.
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100660
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000661 // (E) Carry on. String handling is done.
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100662 __ bind(&check_code);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000663 // r11: irregexp code
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100664 // Check that the irregexp code has been generated for the actual string
665 // encoding. If it has, the field contains a code object otherwise it contains
Ben Murdoch257744e2011-11-30 15:57:28 +0000666 // smi (code flushing support)
667 __ JumpIfSmi(r11, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100668
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000669 // rdi: sequential subject string (or look-alike, external string)
670 // r15: original subject string
671 // rcx: encoding of subject string (1 if one_byte, 0 if two_byte);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100672 // r11: code
673 // Load used arguments before starting to push arguments for call to native
674 // RegExp code to avoid handling changing stack height.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000675 // We have to use r15 instead of rdi to load the length because rdi might
676 // have been only made to look like a sequential string when it actually
677 // is an external string.
678 __ movp(rbx, args.GetArgumentOperand(PREVIOUS_INDEX_ARGUMENT_INDEX));
679 __ JumpIfNotSmi(rbx, &runtime);
680 __ SmiCompare(rbx, FieldOperand(r15, String::kLengthOffset));
681 __ j(above_equal, &runtime);
682 __ SmiToInteger64(rbx, rbx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100683
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100684 // rdi: subject string
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100685 // rbx: previous index
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000686 // rcx: encoding of subject string (1 if one_byte 0 if two_byte);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100687 // r11: code
688 // All checks done. Now push arguments for native regexp code.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000689 Counters* counters = isolate()->counters();
Steve Block44f0eee2011-05-26 01:26:41 +0100690 __ IncrementCounter(counters->regexp_entry_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100691
Steve Block44f0eee2011-05-26 01:26:41 +0100692 // Isolates: note we add an additional parameter here (isolate pointer).
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000693 static const int kRegExpExecuteArguments = 9;
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100694 int argument_slots_on_stack =
695 masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
Steve Block44f0eee2011-05-26 01:26:41 +0100696 __ EnterApiExitFrame(argument_slots_on_stack);
697
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000698 // Argument 9: Pass current isolate address.
699 __ LoadAddress(kScratchRegister,
700 ExternalReference::isolate_address(isolate()));
701 __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kRegisterSize),
Steve Block44f0eee2011-05-26 01:26:41 +0100702 kScratchRegister);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100703
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000704 // Argument 8: Indicate that this is a direct call from JavaScript.
705 __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kRegisterSize),
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100706 Immediate(1));
707
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000708 // Argument 7: Start (high end) of backtracking stack memory area.
709 __ Move(kScratchRegister, address_of_regexp_stack_memory_address);
710 __ movp(r9, Operand(kScratchRegister, 0));
711 __ Move(kScratchRegister, address_of_regexp_stack_memory_size);
712 __ addp(r9, Operand(kScratchRegister, 0));
713 __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kRegisterSize), r9);
714
715 // Argument 6: Set the number of capture registers to zero to force global
716 // regexps to behave as non-global. This does not affect non-global regexps.
717 // Argument 6 is passed in r9 on Linux and on the stack on Windows.
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100718#ifdef _WIN64
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000719 __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kRegisterSize),
720 Immediate(0));
721#else
722 __ Set(r9, 0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100723#endif
724
725 // Argument 5: static offsets vector buffer.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000726 __ LoadAddress(
727 r8, ExternalReference::address_of_static_offsets_vector(isolate()));
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100728 // Argument 5 passed in r8 on Linux and on the stack on Windows.
729#ifdef _WIN64
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000730 __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kRegisterSize), r8);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100731#endif
732
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100733 // rdi: subject string
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100734 // rbx: previous index
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000735 // rcx: encoding of subject string (1 if one_byte 0 if two_byte);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100736 // r11: code
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000737 // r14: slice offset
738 // r15: original subject string
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100739
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100740 // Argument 2: Previous index.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000741 __ movp(arg_reg_2, rbx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100742
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000743 // Argument 4: End of string data
744 // Argument 3: Start of string data
745 Label setup_two_byte, setup_rest, got_length, length_not_from_slice;
746 // Prepare start and end index of the input.
747 // Load the length from the original sliced string if that is the case.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000748 __ addp(rbx, r14);
749 __ SmiToInteger32(arg_reg_3, FieldOperand(r15, String::kLengthOffset));
750 __ addp(r14, arg_reg_3); // Using arg3 as scratch.
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000751
752 // rbx: start index of the input
753 // r14: end index of the input
754 // r15: original subject string
755 __ testb(rcx, rcx); // Last use of rcx as encoding of subject string.
756 __ j(zero, &setup_two_byte, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000757 __ leap(arg_reg_4,
758 FieldOperand(rdi, r14, times_1, SeqOneByteString::kHeaderSize));
759 __ leap(arg_reg_3,
760 FieldOperand(rdi, rbx, times_1, SeqOneByteString::kHeaderSize));
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000761 __ jmp(&setup_rest, Label::kNear);
762 __ bind(&setup_two_byte);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000763 __ leap(arg_reg_4,
764 FieldOperand(rdi, r14, times_2, SeqTwoByteString::kHeaderSize));
765 __ leap(arg_reg_3,
766 FieldOperand(rdi, rbx, times_2, SeqTwoByteString::kHeaderSize));
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000767 __ bind(&setup_rest);
768
769 // Argument 1: Original subject string.
770 // The original subject is in the previous stack frame. Therefore we have to
771 // use rbp, which points exactly to one pointer size below the previous rsp.
772 // (Because creating a new stack frame pushes the previous rbp onto the stack
773 // and thereby moves up rsp by one kPointerSize.)
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000774 __ movp(arg_reg_1, r15);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100775
776 // Locate the code entry and call it.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000777 __ addp(r11, Immediate(Code::kHeaderSize - kHeapObjectTag));
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100778 __ call(r11);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100779
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000780 __ LeaveApiExitFrame(true);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100781
782 // Check the result.
Ben Murdoch257744e2011-11-30 15:57:28 +0000783 Label success;
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100784 Label exception;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000785 __ cmpl(rax, Immediate(1));
786 // We expect exactly one result since we force the called regexp to behave
787 // as non-global.
Ben Murdoch257744e2011-11-30 15:57:28 +0000788 __ j(equal, &success, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100789 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION));
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100790 __ j(equal, &exception);
791 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE));
792 // If none of the above, it can only be retry.
793 // Handle that in the runtime system.
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100794 __ j(not_equal, &runtime);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100795
796 // For failure return null.
797 __ LoadRoot(rax, Heap::kNullValueRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000798 __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100799
800 // Load RegExp data.
801 __ bind(&success);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000802 __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
803 __ movp(rcx, FieldOperand(rax, JSRegExp::kDataOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100804 __ SmiToInteger32(rax,
805 FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
806 // Calculate number of capture registers (number_of_captures + 1) * 2.
807 __ leal(rdx, Operand(rax, rax, times_1, 2));
808
809 // rdx: Number of capture registers
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000810 // Check that the fourth object is a JSArray object.
811 __ movp(r15, args.GetArgumentOperand(LAST_MATCH_INFO_ARGUMENT_INDEX));
812 __ JumpIfSmi(r15, &runtime);
813 __ CmpObjectType(r15, JS_ARRAY_TYPE, kScratchRegister);
814 __ j(not_equal, &runtime);
815 // Check that the JSArray is in fast case.
816 __ movp(rbx, FieldOperand(r15, JSArray::kElementsOffset));
817 __ movp(rax, FieldOperand(rbx, HeapObject::kMapOffset));
818 __ CompareRoot(rax, Heap::kFixedArrayMapRootIndex);
819 __ j(not_equal, &runtime);
820 // Check that the last match info has space for the capture registers and the
821 // additional information. Ensure no overflow in add.
822 STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset);
823 __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset));
824 __ subl(rax, Immediate(RegExpImpl::kLastMatchOverhead));
825 __ cmpl(rdx, rax);
826 __ j(greater, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100827
828 // rbx: last_match_info backing store (FixedArray)
829 // rdx: number of capture registers
830 // Store the capture count.
831 __ Integer32ToSmi(kScratchRegister, rdx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000832 __ movp(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset),
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100833 kScratchRegister);
834 // Store last subject and last input.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000835 __ movp(rax, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
836 __ movp(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax);
837 __ movp(rcx, rax);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100838 __ RecordWriteField(rbx,
839 RegExpImpl::kLastSubjectOffset,
840 rax,
841 rdi,
842 kDontSaveFPRegs);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000843 __ movp(rax, rcx);
844 __ movp(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100845 __ RecordWriteField(rbx,
846 RegExpImpl::kLastInputOffset,
847 rax,
848 rdi,
849 kDontSaveFPRegs);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100850
851 // Get the static offsets vector filled by the native regexp code.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000852 __ LoadAddress(
853 rcx, ExternalReference::address_of_static_offsets_vector(isolate()));
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100854
855 // rbx: last_match_info backing store (FixedArray)
856 // rcx: offsets vector
857 // rdx: number of capture registers
Ben Murdoch257744e2011-11-30 15:57:28 +0000858 Label next_capture, done;
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100859 // Capture register counter starts from number of capture registers and
 860 // counts down until wrapping after zero.
861 __ bind(&next_capture);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000862 __ subp(rdx, Immediate(1));
Ben Murdoch257744e2011-11-30 15:57:28 +0000863 __ j(negative, &done, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100864 // Read the value from the static offsets vector buffer and make it a smi.
865 __ movl(rdi, Operand(rcx, rdx, times_int_size, 0));
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100866 __ Integer32ToSmi(rdi, rdi);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100867 // Store the smi value in the last match info.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000868 __ movp(FieldOperand(rbx,
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100869 rdx,
870 times_pointer_size,
871 RegExpImpl::kFirstCaptureOffset),
872 rdi);
873 __ jmp(&next_capture);
874 __ bind(&done);
875
876 // Return last match info.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000877 __ movp(rax, r15);
878 __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100879
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100880 __ bind(&exception);
881 // Result must now be exception. If there is no pending exception already a
 882 // stack overflow (on the backtrack stack) was detected in RegExp code, but
 883 // the exception has not been created yet. Handle that in the runtime system.
884 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
Steve Block44f0eee2011-05-26 01:26:41 +0100885 ExternalReference pending_exception_address(
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000886 Isolate::kPendingExceptionAddress, isolate());
Steve Block44f0eee2011-05-26 01:26:41 +0100887 Operand pending_exception_operand =
888 masm->ExternalOperand(pending_exception_address, rbx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000889 __ movp(rax, pending_exception_operand);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100890 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000891 __ cmpp(rax, rdx);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100892 __ j(equal, &runtime);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100893
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000894 // For exception, throw the exception again.
895 __ TailCallRuntime(Runtime::kRegExpExecReThrow);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100896
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000897 // Do the runtime call to execute the regexp.
898 __ bind(&runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000899 __ TailCallRuntime(Runtime::kRegExpExec);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000900
901 // Deferred code for string handling.
Ben Murdoch097c5b22016-05-18 11:27:45 +0100902 // (6) Long external string? If not, go to (10).
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000903 __ bind(&not_seq_nor_cons);
904 // Compare flags are still set from (3).
905 __ j(greater, &not_long_external, Label::kNear); // Go to (10).
906
Ben Murdoch097c5b22016-05-18 11:27:45 +0100907 // (7) External string. Short external strings have been ruled out.
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100908 __ bind(&external_string);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000909 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100910 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
911 if (FLAG_debug_code) {
912 // Assert that we do not have a cons or slice (indirect strings) here.
913 // Sequential strings have already been ruled out.
914 __ testb(rbx, Immediate(kIsIndirectStringMask));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000915 __ Assert(zero, kExternalStringExpectedButNotFound);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100916 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000917 __ movp(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100918 // Move the pointer so that offset-wise, it looks like a sequential string.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000919 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
920 __ subp(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100921 STATIC_ASSERT(kTwoByteStringTag == 0);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100922 // (8) Is the external string one byte? If yes, go to (5).
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100923 __ testb(rbx, Immediate(kStringEncodingMask));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100924 __ j(not_zero, &seq_one_byte_string); // Go to (5).
Ben Murdoch592a9fc2012-03-05 11:04:45 +0000925
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000926 // rdi: subject string (flat two-byte)
927 // rax: RegExp data (FixedArray)
Ben Murdoch097c5b22016-05-18 11:27:45 +0100928 // (9) Two byte sequential. Load regexp code for two byte. Go to (E).
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000929 __ bind(&seq_two_byte_string);
930 __ movp(r11, FieldOperand(rax, JSRegExp::kDataUC16CodeOffset));
931 __ Set(rcx, 0); // Type is two byte.
932 __ jmp(&check_code); // Go to (E).
933
934 // (10) Not a string or a short external string? If yes, bail out to runtime.
935 __ bind(&not_long_external);
936 // Catch non-string subject or short external string.
937 STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag !=0);
938 __ testb(rbx, Immediate(kIsNotStringMask | kShortExternalStringMask));
939 __ j(not_zero, &runtime);
940
Ben Murdoch097c5b22016-05-18 11:27:45 +0100941 // (11) Sliced string. Replace subject with parent. Go to (1).
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000942 // Load offset into r14 and replace subject string with parent.
943 __ SmiToInteger32(r14, FieldOperand(rdi, SlicedString::kOffsetOffset));
944 __ movp(rdi, FieldOperand(rdi, SlicedString::kParentOffset));
945 __ jmp(&check_underlying);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100946#endif // V8_INTERPRETED_REGEXP
947}
948
949
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100950static int NegativeComparisonResult(Condition cc) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000951 DCHECK(cc != equal);
952 DCHECK((cc == less) || (cc == less_equal)
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100953 || (cc == greater) || (cc == greater_equal));
954 return (cc == greater || cc == greater_equal) ? LESS : GREATER;
955}
956
957
// Emits a type guard for one CompareIC input. Falls through when |input|
// matches the |expected| CompareICState, otherwise jumps to |fail|:
//   SMI    - any non-smi fails.
//   NUMBER - smis pass; non-smi heap objects must carry the heap-number map.
// Every other expected state accepts all inputs (see the note below on
// internalized strings).
static void CheckInputType(MacroAssembler* masm, Register input,
                           CompareICState::State expected, Label* fail) {
  Label ok;
  if (expected == CompareICState::SMI) {
    __ JumpIfNotSmi(input, fail);
  } else if (expected == CompareICState::NUMBER) {
    // A smi is a valid number, so only heap objects need the map check.
    __ JumpIfSmi(input, &ok);
    __ CompareMap(input, masm->isolate()->factory()->heap_number_map());
    __ j(not_equal, fail);
  }
  // We could be strict about internalized/non-internalized here, but as long as
  // hydrogen doesn't care, the stub doesn't have to care either.
  __ bind(&ok);
}
972
973
// Jumps to |label| unless |object| is an internalized string: smis are
// rejected immediately, and for heap objects the instance type must have
// neither the kIsNotStringMask nor the kIsNotInternalizedMask bit set
// (both tags are zero, per the STATIC_ASSERT below). Clobbers |scratch|,
// which ends up holding the instance type.
static void BranchIfNotInternalizedString(MacroAssembler* masm,
                                          Label* label,
                                          Register object,
                                          Register scratch) {
  __ JumpIfSmi(object, label);
  // Load the map, then the 8-bit instance type out of the map.
  __ movp(scratch, FieldOperand(object, HeapObject::kMapOffset));
  __ movzxbp(scratch,
             FieldOperand(scratch, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  // Any set bit means: not a string, or not internalized.
  __ testb(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, label);
}
986
987
988void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000989 Label runtime_call, check_unequal_objects, done;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000990 Condition cc = GetCondition();
991 Factory* factory = isolate()->factory();
992
993 Label miss;
994 CheckInputType(masm, rdx, left(), &miss);
995 CheckInputType(masm, rax, right(), &miss);
996
997 // Compare two smis.
998 Label non_smi, smi_done;
999 __ JumpIfNotBothSmi(rax, rdx, &non_smi);
1000 __ subp(rdx, rax);
1001 __ j(no_overflow, &smi_done);
1002 __ notp(rdx); // Correct sign in case of overflow. rdx cannot be 0 here.
1003 __ bind(&smi_done);
1004 __ movp(rax, rdx);
1005 __ ret(0);
1006 __ bind(&non_smi);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001007
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001008 // The compare stub returns a positive, negative, or zero 64-bit integer
1009 // value in rax, corresponding to result of comparing the two inputs.
1010 // NOTICE! This code is only reached after a smi-fast-case check, so
1011 // it is certain that at least one operand isn't a smi.
1012
1013 // Two identical objects are equal unless they are both NaN or undefined.
1014 {
Ben Murdoch257744e2011-11-30 15:57:28 +00001015 Label not_identical;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001016 __ cmpp(rax, rdx);
Ben Murdoch257744e2011-11-30 15:57:28 +00001017 __ j(not_equal, &not_identical, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001018
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001019 if (cc != equal) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001020 // Check for undefined. undefined OP undefined is false even though
1021 // undefined == undefined.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001022 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001023 Label check_for_nan;
1024 __ j(not_equal, &check_for_nan, Label::kNear);
1025 __ Set(rax, NegativeComparisonResult(cc));
1026 __ ret(0);
1027 __ bind(&check_for_nan);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001028 }
1029
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001030 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001031 // so we do the second best thing - test it ourselves.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001032 Label heap_number;
1033 // If it's not a heap number, then return equal for (in)equality operator.
1034 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
1035 factory->heap_number_map());
1036 __ j(equal, &heap_number, Label::kNear);
1037 if (cc != equal) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001038 __ movp(rcx, FieldOperand(rax, HeapObject::kMapOffset));
1039 __ movzxbl(rcx, FieldOperand(rcx, Map::kInstanceTypeOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001040 // Call runtime on identical objects. Otherwise return equal.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001041 __ cmpb(rcx, Immediate(static_cast<uint8_t>(FIRST_JS_RECEIVER_TYPE)));
1042 __ j(above_equal, &runtime_call, Label::kFar);
1043 // Call runtime on identical symbols since we need to throw a TypeError.
1044 __ cmpb(rcx, Immediate(static_cast<uint8_t>(SYMBOL_TYPE)));
1045 __ j(equal, &runtime_call, Label::kFar);
1046 // Call runtime on identical SIMD values since we must throw a TypeError.
1047 __ cmpb(rcx, Immediate(static_cast<uint8_t>(SIMD128_VALUE_TYPE)));
1048 __ j(equal, &runtime_call, Label::kFar);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001049 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001050 __ Set(rax, EQUAL);
1051 __ ret(0);
1052
1053 __ bind(&heap_number);
1054 // It is a heap number, so return equal if it's not NaN.
1055 // For NaN, return 1 for every condition except greater and
1056 // greater-equal. Return -1 for them, so the comparison yields
1057 // false for all conditions except not-equal.
1058 __ Set(rax, EQUAL);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001059 __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
1060 __ Ucomisd(xmm0, xmm0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001061 __ setcc(parity_even, rax);
1062 // rax is 0 for equal non-NaN heapnumbers, 1 for NaNs.
1063 if (cc == greater_equal || cc == greater) {
1064 __ negp(rax);
1065 }
1066 __ ret(0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001067
1068 __ bind(&not_identical);
1069 }
1070
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001071 if (cc == equal) { // Both strict and non-strict.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001072 Label slow; // Fallthrough label.
1073
1074 // If we're doing a strict equality comparison, we don't have to do
1075 // type conversion, so we generate code to do fast comparison for objects
1076 // and oddballs. Non-smi numbers and strings still go through the usual
1077 // slow-case code.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001078 if (strict()) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001079 // If either is a Smi (we know that not both are), then they can only
1080 // be equal if the other is a HeapNumber. If so, use the slow case.
1081 {
1082 Label not_smis;
1083 __ SelectNonSmi(rbx, rax, rdx, &not_smis);
1084
1085 // Check if the non-smi operand is a heap number.
1086 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
Ben Murdoch257744e2011-11-30 15:57:28 +00001087 factory->heap_number_map());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001088 // If heap number, handle it in the slow case.
1089 __ j(equal, &slow);
1090 // Return non-equal. ebx (the lower half of rbx) is not zero.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001091 __ movp(rax, rbx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001092 __ ret(0);
1093
1094 __ bind(&not_smis);
1095 }
1096
1097 // If either operand is a JSObject or an oddball value, then they are not
1098 // equal since their pointers are different
1099 // There is no test for undetectability in strict equality.
1100
1101 // If the first object is a JS object, we have done pointer comparison.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001102 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
Ben Murdoch257744e2011-11-30 15:57:28 +00001103 Label first_non_object;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001104 __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
Ben Murdoch257744e2011-11-30 15:57:28 +00001105 __ j(below, &first_non_object, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001106 // Return non-zero (rax (not rax) is not zero)
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001107 Label return_not_equal;
1108 STATIC_ASSERT(kHeapObjectTag != 0);
1109 __ bind(&return_not_equal);
1110 __ ret(0);
1111
1112 __ bind(&first_non_object);
1113 // Check for oddballs: true, false, null, undefined.
1114 __ CmpInstanceType(rcx, ODDBALL_TYPE);
1115 __ j(equal, &return_not_equal);
1116
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001117 __ CmpObjectType(rdx, FIRST_JS_RECEIVER_TYPE, rcx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001118 __ j(above_equal, &return_not_equal);
1119
1120 // Check for oddballs: true, false, null, undefined.
1121 __ CmpInstanceType(rcx, ODDBALL_TYPE);
1122 __ j(equal, &return_not_equal);
1123
1124 // Fall through to the general case.
1125 }
1126 __ bind(&slow);
1127 }
1128
1129 // Generate the number comparison code.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001130 Label non_number_comparison;
1131 Label unordered;
1132 FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison);
1133 __ xorl(rax, rax);
1134 __ xorl(rcx, rcx);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001135 __ Ucomisd(xmm0, xmm1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001136
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001137 // Don't base result on EFLAGS when a NaN is involved.
1138 __ j(parity_even, &unordered, Label::kNear);
1139 // Return a result of -1, 0, or 1, based on EFLAGS.
1140 __ setcc(above, rax);
1141 __ setcc(below, rcx);
1142 __ subp(rax, rcx);
1143 __ ret(0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001144
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001145 // If one of the numbers was NaN, then the result is always false.
1146 // The cc is never not-equal.
1147 __ bind(&unordered);
1148 DCHECK(cc != not_equal);
1149 if (cc == less || cc == less_equal) {
1150 __ Set(rax, 1);
1151 } else {
1152 __ Set(rax, -1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001153 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001154 __ ret(0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001155
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001156 // The number comparison code did not provide a valid result.
1157 __ bind(&non_number_comparison);
1158
1159 // Fast negative check for internalized-to-internalized equality.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001160 Label check_for_strings;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001161 if (cc == equal) {
1162 BranchIfNotInternalizedString(
1163 masm, &check_for_strings, rax, kScratchRegister);
1164 BranchIfNotInternalizedString(
1165 masm, &check_for_strings, rdx, kScratchRegister);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001166
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001167 // We've already checked for object identity, so if both operands are
1168 // internalized strings they aren't equal. Register rax (not rax) already
1169 // holds a non-zero value, which indicates not equal, so just return.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001170 __ ret(0);
1171 }
1172
1173 __ bind(&check_for_strings);
1174
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001175 __ JumpIfNotBothSequentialOneByteStrings(rdx, rax, rcx, rbx,
1176 &check_unequal_objects);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001177
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001178 // Inline comparison of one-byte strings.
1179 if (cc == equal) {
1180 StringHelper::GenerateFlatOneByteStringEquals(masm, rdx, rax, rcx, rbx);
Ben Murdoch257744e2011-11-30 15:57:28 +00001181 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001182 StringHelper::GenerateCompareFlatOneByteStrings(masm, rdx, rax, rcx, rbx,
1183 rdi, r8);
Ben Murdoch257744e2011-11-30 15:57:28 +00001184 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001185
1186#ifdef DEBUG
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001187 __ Abort(kUnexpectedFallThroughFromStringComparison);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001188#endif
1189
1190 __ bind(&check_unequal_objects);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001191 if (cc == equal && !strict()) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001192 // Not strict equality. Objects are unequal if
1193 // they are both JSObjects and not undetectable,
1194 // and their pointers are different.
Ben Murdochda12d292016-06-02 14:46:10 +01001195 Label return_equal, return_unequal, undetectable;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001196 // At most one is a smi, so we can test for smi by adding the two.
1197 // A smi plus a heap object has the low bit set, a heap object plus
1198 // a heap object has the low bit clear.
1199 STATIC_ASSERT(kSmiTag == 0);
1200 STATIC_ASSERT(kSmiTagMask == 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001201 __ leap(rcx, Operand(rax, rdx, times_1, 0));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001202 __ testb(rcx, Immediate(kSmiTagMask));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001203 __ j(not_zero, &runtime_call, Label::kNear);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001204
1205 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
1206 __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001207 __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
1208 Immediate(1 << Map::kIsUndetectable));
Ben Murdochda12d292016-06-02 14:46:10 +01001209 __ j(not_zero, &undetectable, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001210 __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
1211 Immediate(1 << Map::kIsUndetectable));
Ben Murdochda12d292016-06-02 14:46:10 +01001212 __ j(not_zero, &return_unequal, Label::kNear);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001213
1214 __ CmpInstanceType(rbx, FIRST_JS_RECEIVER_TYPE);
1215 __ j(below, &runtime_call, Label::kNear);
1216 __ CmpInstanceType(rcx, FIRST_JS_RECEIVER_TYPE);
1217 __ j(below, &runtime_call, Label::kNear);
1218
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001219 __ bind(&return_unequal);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001220 // Return non-equal by returning the non-zero object pointer in rax.
1221 __ ret(0);
1222
1223 __ bind(&undetectable);
1224 __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
1225 Immediate(1 << Map::kIsUndetectable));
Ben Murdochda12d292016-06-02 14:46:10 +01001226 __ j(zero, &return_unequal, Label::kNear);
1227
1228 // If both sides are JSReceivers, then the result is false according to
1229 // the HTML specification, which says that only comparisons with null or
1230 // undefined are affected by special casing for document.all.
1231 __ CmpInstanceType(rbx, ODDBALL_TYPE);
1232 __ j(zero, &return_equal, Label::kNear);
1233 __ CmpInstanceType(rcx, ODDBALL_TYPE);
1234 __ j(not_zero, &return_unequal, Label::kNear);
1235
1236 __ bind(&return_equal);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001237 __ Set(rax, EQUAL);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001238 __ ret(0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001239 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001240 __ bind(&runtime_call);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001241
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001242 if (cc == equal) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001243 {
1244 FrameScope scope(masm, StackFrame::INTERNAL);
1245 __ Push(rdx);
1246 __ Push(rax);
1247 __ CallRuntime(strict() ? Runtime::kStrictEqual : Runtime::kEqual);
1248 }
1249 // Turn true into 0 and false into some non-zero value.
1250 STATIC_ASSERT(EQUAL == 0);
1251 __ LoadRoot(rdx, Heap::kTrueValueRootIndex);
1252 __ subp(rax, rdx);
1253 __ Ret();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001254 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001255 // Push arguments below the return address to prepare jump to builtin.
1256 __ PopReturnAddressTo(rcx);
1257 __ Push(rdx);
1258 __ Push(rax);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001259 __ Push(Smi::FromInt(NegativeComparisonResult(cc)));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001260 __ PushReturnAddressFrom(rcx);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001261 __ TailCallRuntime(Runtime::kCompare);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001262 }
1263
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001264 __ bind(&miss);
1265 GenerateMiss(masm);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001266}
1267
1268
// Calls |stub| from within GenerateRecordCallTarget while preserving the
// registers that the surrounding code still needs (rax, rdi, rdx, rbx).
// The registers are saved by pushing before the call and restored by popping
// in reverse order afterwards.
static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
  // rax : number of arguments to the construct function
  // rbx : feedback vector
  // rdx : slot in feedback vector (Smi)
  // rdi : the function to call
  FrameScope scope(masm, StackFrame::INTERNAL);

  // Number-of-arguments register must be smi-tagged to call out.
  __ Integer32ToSmi(rax, rax);
  __ Push(rax);
  __ Push(rdi);
  // The slot index is smi-tagged here as well before being saved.
  __ Integer32ToSmi(rdx, rdx);
  __ Push(rdx);
  __ Push(rbx);

  __ CallStub(stub);

  __ Pop(rbx);
  // NOTE: rdx is restored still smi-tagged (no SmiToInteger32 here); callers
  // such as GenerateRecordCallTarget re-untag it after this helper returns.
  __ Pop(rdx);
  __ Pop(rdi);
  // rax is untagged again so it holds the raw argument count on exit.
  __ Pop(rax);
  __ SmiToInteger32(rax, rax);
}
1292
1293
// Records the called function in the feedback vector slot, maintaining the
// inline-cache state machine: uninitialized -> monomorphic (WeakCell holding
// the JSFunction, or an AllocationSite for the Array function) -> megamorphic
// (sentinel symbol).  Also maintains the per-slot call counter stored in the
// slot immediately after the cache state.
static void GenerateRecordCallTarget(MacroAssembler* masm) {
  // Cache the called function in a feedback vector slot. Cache states
  // are uninitialized, monomorphic (indicated by a JSFunction), and
  // megamorphic.
  // rax : number of arguments to the construct function
  // rbx : feedback vector
  // rdx : slot in feedback vector (Smi)
  // rdi : the function to call
  Isolate* isolate = masm->isolate();
  Label initialize, done, miss, megamorphic, not_array_function;
  Label done_initialize_count, done_increment_count;

  // Load the cache state into r11.
  __ SmiToInteger32(rdx, rdx);
  __ movp(r11,
          FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));

  // A monomorphic cache hit or an already megamorphic state: invoke the
  // function without changing the state.
  // We don't know if r11 is a WeakCell or a Symbol, but it's harmless to read
  // at this position in a symbol (see static asserts in
  // type-feedback-vector.h).
  Label check_allocation_site;
  __ cmpp(rdi, FieldOperand(r11, WeakCell::kValueOffset));
  __ j(equal, &done_increment_count, Label::kFar);
  __ CompareRoot(r11, Heap::kmegamorphic_symbolRootIndex);
  __ j(equal, &done, Label::kFar);
  __ CompareRoot(FieldOperand(r11, HeapObject::kMapOffset),
                 Heap::kWeakCellMapRootIndex);
  __ j(not_equal, &check_allocation_site);

  // If the weak cell is cleared, we have a new chance to become monomorphic.
  __ CheckSmi(FieldOperand(r11, WeakCell::kValueOffset));
  __ j(equal, &initialize);
  __ jmp(&megamorphic);

  __ bind(&check_allocation_site);
  // If we came here, we need to see if we are the array function.
  // If we didn't have a matching function, and we didn't find the megamorph
  // sentinel, then we have in the slot either some other function or an
  // AllocationSite.
  __ CompareRoot(FieldOperand(r11, 0), Heap::kAllocationSiteMapRootIndex);
  __ j(not_equal, &miss);

  // Make sure the function is the Array() function; an AllocationSite in the
  // slot only counts as a monomorphic hit for the Array constructor.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r11);
  __ cmpp(rdi, r11);
  __ j(not_equal, &megamorphic);
  __ jmp(&done_increment_count);

  __ bind(&miss);

  // A monomorphic miss (i.e, here the cache is not uninitialized) goes
  // megamorphic.
  __ CompareRoot(r11, Heap::kuninitialized_symbolRootIndex);
  __ j(equal, &initialize);
  // MegamorphicSentinel is an immortal immovable object (undefined) so no
  // write-barrier is needed.
  __ bind(&megamorphic);
  __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
          TypeFeedbackVector::MegamorphicSentinel(isolate));
  __ jmp(&done);

  // An uninitialized cache is patched with the function or sentinel to
  // indicate the ElementsKind if function is the Array constructor.
  __ bind(&initialize);

  // Make sure the function is the Array() function
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r11);
  __ cmpp(rdi, r11);
  __ j(not_equal, &not_array_function);

  // Array function: record an AllocationSite in the slot.  The stub call
  // clobbers no state the caller needs (see CallStubInRecordCallTarget).
  CreateAllocationSiteStub create_stub(isolate);
  CallStubInRecordCallTarget(masm, &create_stub);
  __ jmp(&done_initialize_count);

  __ bind(&not_array_function);
  // Any other function: record it via a WeakCell so GC can clear it.
  CreateWeakCellStub weak_cell_stub(isolate);
  CallStubInRecordCallTarget(masm, &weak_cell_stub);

  __ bind(&done_initialize_count);
  // Initialize the call counter.  rdx comes back smi-tagged from
  // CallStubInRecordCallTarget, so untag it again before indexing.
  __ SmiToInteger32(rdx, rdx);
  __ Move(FieldOperand(rbx, rdx, times_pointer_size,
                       FixedArray::kHeaderSize + kPointerSize),
          Smi::FromInt(1));
  __ jmp(&done);

  __ bind(&done_increment_count);

  // Increment the call count for monomorphic function calls.
  __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size,
                                 FixedArray::kHeaderSize + kPointerSize),
                    Smi::FromInt(1));

  __ bind(&done);
  // Restore the smi-tagged slot index expected by the caller.
  __ Integer32ToSmi(rdx, rdx);
}
1392
1393
// Emits the CallConstruct stub: records type feedback for the construct
// call, then tail-calls the constructor's function-specific construct stub
// (or the generic Construct builtin when the callee is not a JSFunction).
void CallConstructStub::Generate(MacroAssembler* masm) {
  // rax : number of arguments
  // rbx : feedback vector
  // rdx : slot in feedback vector (Smi)
  // rdi : constructor function

  Label non_function;
  // Check that the constructor is not a smi.
  __ JumpIfSmi(rdi, &non_function);
  // Check that constructor is a JSFunction.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, r11);
  __ j(not_equal, &non_function);

  // Update the feedback slot (monomorphic/megamorphic state, call count).
  GenerateRecordCallTarget(masm);

  __ SmiToInteger32(rdx, rdx);
  Label feedback_register_initialized;
  // Put the AllocationSite from the feedback vector into rbx, or undefined.
  __ movp(rbx,
          FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));
  __ CompareRoot(FieldOperand(rbx, 0), Heap::kAllocationSiteMapRootIndex);
  __ j(equal, &feedback_register_initialized, Label::kNear);
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ bind(&feedback_register_initialized);

  __ AssertUndefinedOrAllocationSite(rbx);

  // Pass new target to construct stub.
  __ movp(rdx, rdi);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset));
  __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
  __ jmp(rcx);

  // Callee is a smi or not a JSFunction: defer to the generic Construct
  // builtin, passing the callee as new target as well.
  __ bind(&non_function);
  __ movp(rdx, rdi);
  __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
1435
1436
// Handles a CallIC hit where the feedback slot holds an AllocationSite:
// verifies the callee really is the Array() function (otherwise jumps to
// |miss|), bumps the call counter, and tail-calls the Array constructor stub.
void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
  // rdi - function
  // rdx - slot id
  // rbx - vector
  // rcx - allocation site (loaded from vector[slot]).
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r8);
  __ cmpp(rdi, r8);
  __ j(not_equal, miss);

  __ movp(rax, Immediate(arg_count()));

  // Increment the call count for monomorphic function calls.
  __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size,
                                 FixedArray::kHeaderSize + kPointerSize),
                    Smi::FromInt(1));

  // ArrayConstructorStub expects the AllocationSite in rbx and the
  // constructor/new-target in rdx.
  __ movp(rbx, rcx);
  __ movp(rdx, rdi);
  ArrayConstructorStub stub(masm->isolate(), arg_count());
  __ TailCallStub(&stub);
}
1458
1459
// Emits the CallIC stub: dispatches on the feedback-slot state
// (monomorphic WeakCell hit, AllocationSite for Array, megamorphic
// sentinel, uninitialized) and tail-calls the appropriate Call builtin,
// transitioning the slot state and updating the call counter on the way.
void CallICStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rdi - function
  // -- rdx - slot id
  // -- rbx - vector
  // -----------------------------------
  Isolate* isolate = masm->isolate();
  Label extra_checks_or_miss, call, call_function;
  int argc = arg_count();
  StackArgumentsAccessor args(rsp, argc);
  ParameterCount actual(argc);

  // The checks. First, does rdi match the recorded monomorphic target?
  __ SmiToInteger32(rdx, rdx);
  __ movp(rcx,
          FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));

  // We don't know that we have a weak cell. We might have a private symbol
  // or an AllocationSite, but the memory is safe to examine.
  // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
  // FixedArray.
  // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
  // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
  // computed, meaning that it can't appear to be a pointer. If the low bit is
  // 0, then hash is computed, but the 0 bit prevents the field from appearing
  // to be a pointer.
  STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
  STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
                    WeakCell::kValueOffset &&
                WeakCell::kValueOffset == Symbol::kHashFieldSlot);

  __ cmpp(rdi, FieldOperand(rcx, WeakCell::kValueOffset));
  __ j(not_equal, &extra_checks_or_miss);

  // The compare above could have been a SMI/SMI comparison. Guard against this
  // convincing us that we have a monomorphic JSFunction.
  __ JumpIfSmi(rdi, &extra_checks_or_miss);

  // Increment the call count for monomorphic function calls.
  __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size,
                                 FixedArray::kHeaderSize + kPointerSize),
                    Smi::FromInt(1));

  // Monomorphic hit on a known JSFunction: dispatch through CallFunction.
  __ bind(&call_function);
  __ Set(rax, argc);
  __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
                                                    tail_call_mode()),
          RelocInfo::CODE_TARGET);

  __ bind(&extra_checks_or_miss);
  Label uninitialized, miss, not_allocation_site;

  // Already megamorphic: call generically without touching the slot.
  __ Cmp(rcx, TypeFeedbackVector::MegamorphicSentinel(isolate));
  __ j(equal, &call);

  // Check if we have an allocation site.
  __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
                 Heap::kAllocationSiteMapRootIndex);
  __ j(not_equal, &not_allocation_site);

  // We have an allocation site.
  HandleArrayCase(masm, &miss);

  __ bind(&not_allocation_site);

  // The following cases attempt to handle MISS cases without going to the
  // runtime.
  if (FLAG_trace_ic) {
    // With IC tracing enabled, always go through the runtime so transitions
    // get logged.
    __ jmp(&miss);
  }

  __ Cmp(rcx, TypeFeedbackVector::UninitializedSentinel(isolate));
  __ j(equal, &uninitialized);

  // We are going megamorphic. If the feedback is a JSFunction, it is fine
  // to handle it here. More complex cases are dealt with in the runtime.
  __ AssertNotSmi(rcx);
  __ CmpObjectType(rcx, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &miss);
  __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
          TypeFeedbackVector::MegamorphicSentinel(isolate));

  // Generic (megamorphic) call path.
  __ bind(&call);
  __ Set(rax, argc);
  __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
          RelocInfo::CODE_TARGET);

  __ bind(&uninitialized);

  // We are going monomorphic, provided we actually have a JSFunction.
  __ JumpIfSmi(rdi, &miss);

  // Goto miss case if we do not have a function.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &miss);

  // Make sure the function is not the Array() function, which requires special
  // behavior on MISS.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, rcx);
  __ cmpp(rdi, rcx);
  __ j(equal, &miss);

  // Make sure the function belongs to the same native context.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kContextOffset));
  __ movp(rcx, ContextOperand(rcx, Context::NATIVE_CONTEXT_INDEX));
  __ cmpp(rcx, NativeContextOperand());
  __ j(not_equal, &miss);

  // Initialize the call counter.
  __ Move(FieldOperand(rbx, rdx, times_pointer_size,
                       FixedArray::kHeaderSize + kPointerSize),
          Smi::FromInt(1));

  // Store the function. Use a stub since we need a frame for allocation.
  // rbx - vector
  // rdx - slot (needs to be in smi form)
  // rdi - function
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    CreateWeakCellStub create_stub(isolate);

    __ Integer32ToSmi(rdx, rdx);
    __ Push(rdi);
    __ CallStub(&create_stub);
    __ Pop(rdi);
  }

  __ jmp(&call_function);

  // We are here because tracing is on or we encountered a MISS case we can't
  // handle here.
  __ bind(&miss);
  GenerateMiss(masm);

  // After the runtime miss handler returns the target in rdi, call it
  // through the generic path.
  __ jmp(&call);

  // Unreachable
  __ int3();
}
1599
1600
// Emits the CallIC miss path: calls the CallIC_Miss runtime function with
// the callee and feedback info, and leaves the (possibly updated) callee
// in rdi for the caller to invoke.
void CallICStub::GenerateMiss(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);

  // Push the function and the feedback info (vector and smi-tagged slot).
  __ Push(rdi);
  __ Push(rbx);
  __ Integer32ToSmi(rdx, rdx);
  __ Push(rdx);

  // Call the entry.
  __ CallRuntime(Runtime::kCallIC_Miss);

  // Move result to rdi and exit the internal frame.
  __ movp(rdi, rax);
}
1616
1617
// Returning false indicates the generated CEntry stub code does not need to
// be placed in an immovable code object on x64 (NOTE(review): inferred from
// the name; the consumer of this flag is outside this file).
bool CEntryStub::NeedsImmovableCode() {
  return false;
}
1621
1622
// Pre-generates the set of stubs needed before general code generation can
// run.  The order below is deliberate (see the note about the store buffer
// overflow stubs); do not reorder these calls.
void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  CEntryStub::GenerateAheadOfTime(isolate);
  StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
  StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
  // It is important that the store buffer overflow stubs are generated first.
  CommonArrayConstructorStub::GenerateStubsAheadOfTime(isolate);
  CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
  CreateWeakCellStub::GenerateAheadOfTime(isolate);
  BinaryOpICStub::GenerateAheadOfTime(isolate);
  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
  StoreFastElementStub::GenerateAheadOfTime(isolate);
  TypeofStub::GenerateAheadOfTime(isolate);
}
1636
1637
// Intentionally empty: no separate FP-mode stubs are pre-generated on x64
// (presumably because SSE2 is always available on this target — confirm
// against the other architecture ports if this ever changes).
void CodeStub::GenerateFPStubs(Isolate* isolate) {
}
1640
1641
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001642void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
1643 CEntryStub stub(isolate, 1, kDontSaveFPRegs);
1644 stub.GetCode();
1645 CEntryStub save_doubles(isolate, 1, kSaveFPRegs);
1646 save_doubles.GetCode();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001647}
1648
1649
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001650void CEntryStub::Generate(MacroAssembler* masm) {
1651 // rax: number of arguments including receiver
1652 // rbx: pointer to C function (C callee-saved)
1653 // rbp: frame pointer of calling JS frame (restored after C call)
1654 // rsp: stack pointer (restored after C call)
1655 // rsi: current context (restored)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001656 //
1657 // If argv_in_register():
1658 // r15: pointer to the first argument
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001659
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001660 ProfileEntryHookStub::MaybeCallEntryHook(masm);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001661
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001662#ifdef _WIN64
Ben Murdoch097c5b22016-05-18 11:27:45 +01001663 // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9. It requires the
1664 // stack to be aligned to 16 bytes. It only allows a single-word to be
1665 // returned in register rax. Larger return sizes must be written to an address
1666 // passed as a hidden first argument.
1667 const Register kCCallArg0 = rcx;
1668 const Register kCCallArg1 = rdx;
1669 const Register kCCallArg2 = r8;
1670 const Register kCCallArg3 = r9;
1671 const int kArgExtraStackSpace = 2;
1672 const int kMaxRegisterResultSize = 1;
1673#else
1674 // GCC / Clang passes arguments in rdi, rsi, rdx, rcx, r8, r9. Simple results
1675 // are returned in rax, and a struct of two pointers are returned in rax+rdx.
1676 // Larger return sizes must be written to an address passed as a hidden first
1677 // argument.
1678 const Register kCCallArg0 = rdi;
1679 const Register kCCallArg1 = rsi;
1680 const Register kCCallArg2 = rdx;
1681 const Register kCCallArg3 = rcx;
1682 const int kArgExtraStackSpace = 0;
1683 const int kMaxRegisterResultSize = 2;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001684#endif // _WIN64
Ben Murdoch097c5b22016-05-18 11:27:45 +01001685
1686 // Enter the exit frame that transitions from JavaScript to C++.
1687 int arg_stack_space =
1688 kArgExtraStackSpace +
1689 (result_size() <= kMaxRegisterResultSize ? 0 : result_size());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001690 if (argv_in_register()) {
1691 DCHECK(!save_doubles());
1692 __ EnterApiExitFrame(arg_stack_space);
1693 // Move argc into r14 (argv is already in r15).
1694 __ movp(r14, rax);
1695 } else {
1696 __ EnterExitFrame(arg_stack_space, save_doubles());
1697 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001698
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001699 // rbx: pointer to builtin function (C callee-saved).
1700 // rbp: frame pointer of exit frame (restored after C call).
1701 // rsp: stack pointer (restored after C call).
1702 // r14: number of arguments including receiver (C callee-saved).
Steve Block44f0eee2011-05-26 01:26:41 +01001703 // r15: argv pointer (C callee-saved).
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001704
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001705 // Check stack alignment.
1706 if (FLAG_debug_code) {
1707 __ CheckStackAlignment();
1708 }
1709
Ben Murdoch097c5b22016-05-18 11:27:45 +01001710 // Call C function. The arguments object will be created by stubs declared by
1711 // DECLARE_RUNTIME_FUNCTION().
1712 if (result_size() <= kMaxRegisterResultSize) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001713 // Pass a pointer to the Arguments object as the first argument.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001714 // Return result in single register (rax), or a register pair (rax, rdx).
1715 __ movp(kCCallArg0, r14); // argc.
1716 __ movp(kCCallArg1, r15); // argv.
1717 __ Move(kCCallArg2, ExternalReference::isolate_address(isolate()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001718 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001719 DCHECK_LE(result_size(), 3);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001720 // Pass a pointer to the result location as the first argument.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001721 __ leap(kCCallArg0, StackSpaceOperand(kArgExtraStackSpace));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001722 // Pass a pointer to the Arguments object as the second argument.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001723 __ movp(kCCallArg1, r14); // argc.
1724 __ movp(kCCallArg2, r15); // argv.
1725 __ Move(kCCallArg3, ExternalReference::isolate_address(isolate()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001726 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001727 __ call(rbx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001728
Ben Murdoch097c5b22016-05-18 11:27:45 +01001729 if (result_size() > kMaxRegisterResultSize) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001730 // Read result values stored on stack. Result is stored
Ben Murdoch097c5b22016-05-18 11:27:45 +01001731 // above the two Arguments object slots on Win64.
1732 DCHECK_LE(result_size(), 3);
1733 __ movq(kReturnRegister0, StackSpaceOperand(kArgExtraStackSpace + 0));
1734 __ movq(kReturnRegister1, StackSpaceOperand(kArgExtraStackSpace + 1));
1735 if (result_size() > 2) {
1736 __ movq(kReturnRegister2, StackSpaceOperand(kArgExtraStackSpace + 2));
1737 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001738 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01001739 // Result is in rax, rdx:rax or r8:rdx:rax - do not destroy these registers!
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001740
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001741 // Check result for exception sentinel.
1742 Label exception_returned;
1743 __ CompareRoot(rax, Heap::kExceptionRootIndex);
1744 __ j(equal, &exception_returned);
1745
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001746 // Check that there is no pending exception, otherwise we
1747 // should have returned the exception sentinel.
1748 if (FLAG_debug_code) {
1749 Label okay;
1750 __ LoadRoot(r14, Heap::kTheHoleValueRootIndex);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001751 ExternalReference pending_exception_address(
1752 Isolate::kPendingExceptionAddress, isolate());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001753 Operand pending_exception_operand =
1754 masm->ExternalOperand(pending_exception_address);
1755 __ cmpp(r14, pending_exception_operand);
1756 __ j(equal, &okay, Label::kNear);
1757 __ int3();
1758 __ bind(&okay);
1759 }
1760
1761 // Exit the JavaScript to C++ exit frame.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001762 __ LeaveExitFrame(save_doubles(), !argv_in_register());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001763 __ ret(0);
1764
1765 // Handling of exception.
1766 __ bind(&exception_returned);
1767
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001768 ExternalReference pending_handler_context_address(
1769 Isolate::kPendingHandlerContextAddress, isolate());
1770 ExternalReference pending_handler_code_address(
1771 Isolate::kPendingHandlerCodeAddress, isolate());
1772 ExternalReference pending_handler_offset_address(
1773 Isolate::kPendingHandlerOffsetAddress, isolate());
1774 ExternalReference pending_handler_fp_address(
1775 Isolate::kPendingHandlerFPAddress, isolate());
1776 ExternalReference pending_handler_sp_address(
1777 Isolate::kPendingHandlerSPAddress, isolate());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001778
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001779 // Ask the runtime for help to determine the handler. This will set rax to
1780 // contain the current pending exception, don't clobber it.
1781 ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler,
1782 isolate());
1783 {
1784 FrameScope scope(masm, StackFrame::MANUAL);
1785 __ movp(arg_reg_1, Immediate(0)); // argc.
1786 __ movp(arg_reg_2, Immediate(0)); // argv.
1787 __ Move(arg_reg_3, ExternalReference::isolate_address(isolate()));
1788 __ PrepareCallCFunction(3);
1789 __ CallCFunction(find_handler, 3);
1790 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001791
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001792 // Retrieve the handler context, SP and FP.
1793 __ movp(rsi, masm->ExternalOperand(pending_handler_context_address));
1794 __ movp(rsp, masm->ExternalOperand(pending_handler_sp_address));
1795 __ movp(rbp, masm->ExternalOperand(pending_handler_fp_address));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001796
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001797 // If the handler is a JS frame, restore the context to the frame. Note that
1798 // the context will be set to (rsi == 0) for non-JS frames.
1799 Label skip;
1800 __ testp(rsi, rsi);
1801 __ j(zero, &skip, Label::kNear);
1802 __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
1803 __ bind(&skip);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001804
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001805 // Compute the handler entry address and jump to it.
1806 __ movp(rdi, masm->ExternalOperand(pending_handler_code_address));
1807 __ movp(rdx, masm->ExternalOperand(pending_handler_offset_address));
1808 __ leap(rdi, FieldOperand(rdi, rdx, times_1, Code::kHeaderSize));
1809 __ jmp(rdi);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001810}
1811
1812
// Generates the JS entry trampoline: the code that transitions from C++ into
// JavaScript. The emitted code builds an entry frame (frame-type marker,
// current context, all callee-saved registers of the host ABI — including
// XMM6-XMM15 on Win64), records the top frame descriptor and outermost-frame
// marker, links a stack handler for exception propagation, and calls the
// JSEntryTrampoline builtin (or the construct variant, depending on type()).
// On the way out everything is unwound in reverse order and control returns
// to the C++ caller with the result in rax (or the exception sentinel if the
// faked catch block was entered).
void JSEntryStub::Generate(MacroAssembler* masm) {
  Label invoke, handler_entry, exit;
  Label not_outermost_js, not_outermost_js_2;

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  {  // NOLINT. Scope block confuses linter.
    // The root array register is not yet set up inside this scope; no root
    // array accesses may be emitted until InitializeRootRegister() below.
    MacroAssembler::NoRootArrayScope uninitialized_root_register(masm);
    // Set up frame.
    __ pushq(rbp);
    __ movp(rbp, rsp);

    // Push the stack frame type.
    int marker = type();
    __ Push(Smi::FromInt(marker));  // frame type marker
    // Push the current context, read from the isolate's context address.
    ExternalReference context_address(Isolate::kContextAddress, isolate());
    __ Load(kScratchRegister, context_address);
    __ Push(kScratchRegister);  // context
    // Save callee-saved registers (X64/X32/Win64 calling conventions).
    __ pushq(r12);
    __ pushq(r13);
    __ pushq(r14);
    __ pushq(r15);
#ifdef _WIN64
    __ pushq(rdi);  // Only callee save in Win64 ABI, argument in AMD64 ABI.
    __ pushq(rsi);  // Only callee save in Win64 ABI, argument in AMD64 ABI.
#endif
    __ pushq(rbx);

#ifdef _WIN64
    // On Win64 XMM6-XMM15 are callee-save; spill them to a dedicated block
    // below the integer register saves.
    __ subp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0), xmm6);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1), xmm7);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2), xmm8);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3), xmm9);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4), xmm10);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5), xmm11);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6), xmm12);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7), xmm13);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14);
    __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15);
#endif

    // Set up the roots and smi constant registers.
    // Needs to be done before any further smi loads.
    __ InitializeRootRegister();
  }

  // Save copies of the top frame descriptor on the stack.
  ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
  {
    Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
    __ Push(c_entry_fp_operand);
  }

  // If this is the outermost JS call, set js_entry_sp value.
  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
  __ Load(rax, js_entry_sp);
  __ testp(rax, rax);
  __ j(not_zero, &not_outermost_js);
  // js_entry_sp was zero: this frame is the outermost one; record its rbp.
  __ Push(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
  __ movp(rax, rbp);
  __ Store(js_entry_sp, rax);
  Label cont;
  __ jmp(&cont);
  __ bind(&not_outermost_js);
  __ Push(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME));
  __ bind(&cont);

  // Jump to a faked try block that does the invoke, with a faked catch
  // block that sets the pending exception.
  __ jmp(&invoke);
  __ bind(&handler_entry);
  handler_offset_ = handler_entry.pos();
  // Caught exception: Store result (exception) in the pending exception
  // field in the JSEnv and return a failure sentinel.
  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
                                      isolate());
  __ Store(pending_exception, rax);
  __ LoadRoot(rax, Heap::kExceptionRootIndex);
  __ jmp(&exit);

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  __ PushStackHandler();

  // Clear any pending exceptions.
  __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
  __ Store(pending_exception, rax);

  // Fake a receiver (NULL).
  __ Push(Immediate(0));  // receiver

  // Invoke the function by calling through JS entry trampoline builtin and
  // pop the faked function when we return. We load the address from an
  // external reference instead of inlining the call target address directly
  // in the code, because the builtin stubs may not have been generated yet
  // at the time this code is generated.
  if (type() == StackFrame::ENTRY_CONSTRUCT) {
    ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
                                      isolate());
    __ Load(rax, construct_entry);
  } else {
    ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
    __ Load(rax, entry);
  }
  // Call the trampoline's first instruction (skip the Code object header).
  __ leap(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
  __ call(kScratchRegister);

  // Unlink this frame from the handler chain.
  __ PopStackHandler();

  __ bind(&exit);
  // Check if the current stack frame is marked as the outermost JS frame.
  __ Pop(rbx);
  __ Cmp(rbx, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
  __ j(not_equal, &not_outermost_js_2);
  // Leaving the outermost frame: clear js_entry_sp again.
  __ Move(kScratchRegister, js_entry_sp);
  __ movp(Operand(kScratchRegister, 0), Immediate(0));
  __ bind(&not_outermost_js_2);

  // Restore the top frame descriptor from the stack.
  { Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
    __ Pop(c_entry_fp_operand);
  }

  // Restore callee-saved registers (X64 conventions).
#ifdef _WIN64
  // On Win64 XMM6-XMM15 are callee-save.
  __ movdqu(xmm6, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0));
  __ movdqu(xmm7, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1));
  __ movdqu(xmm8, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2));
  __ movdqu(xmm9, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3));
  __ movdqu(xmm10, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4));
  __ movdqu(xmm11, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5));
  __ movdqu(xmm12, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6));
  __ movdqu(xmm13, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7));
  __ movdqu(xmm14, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8));
  __ movdqu(xmm15, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9));
  __ addp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
#endif

  __ popq(rbx);
#ifdef _WIN64
  // Callee-save in Win64 ABI, arguments/volatile in AMD64 ABI.
  __ popq(rsi);
  __ popq(rdi);
#endif
  __ popq(r15);
  __ popq(r14);
  __ popq(r13);
  __ popq(r12);
  __ addp(rsp, Immediate(2 * kPointerSize));  // remove markers

  // Restore frame pointer and return.
  __ popq(rbp);
  __ ret(0);
}
1972
1973
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001974// -------------------------------------------------------------------------
1975// StringCharCodeAtGenerator
1976
1977void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001978 // If the receiver is a smi trigger the non-string case.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001979 if (check_mode_ == RECEIVER_IS_UNKNOWN) {
1980 __ JumpIfSmi(object_, receiver_not_string_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001981
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001982 // Fetch the instance type of the receiver into result register.
1983 __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset));
1984 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
1985 // If the receiver is not a string trigger the non-string case.
1986 __ testb(result_, Immediate(kIsNotStringMask));
1987 __ j(not_zero, receiver_not_string_);
1988 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001989
1990 // If the index is non-smi trigger the non-smi case.
1991 __ JumpIfNotSmi(index_, &index_not_smi_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001992 __ bind(&got_smi_index_);
1993
1994 // Check for index out of range.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001995 __ SmiCompare(index_, FieldOperand(object_, String::kLengthOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001996 __ j(above_equal, index_out_of_range_);
1997
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001998 __ SmiToInteger32(index_, index_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001999
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002000 StringCharLoadGenerator::Generate(
2001 masm, object_, index_, result_, &call_runtime_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002002
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002003 __ Integer32ToSmi(result_, result_);
2004 __ bind(&exit_);
2005}
2006
2007
// Emits the slow-path continuations for GenerateFast. Control only reaches
// these sequences via the labels bound here (index_not_smi_, call_runtime_);
// straight-line fall-through from the fast path is a bug, hence the Abort()s
// at entry and exit. call_helper performs any frame setup/teardown required
// around the runtime calls; when embed_mode is PART_OF_IC_HANDLER the IC's
// vector and slot registers are preserved across the conversion call.
void StringCharCodeAtGenerator::GenerateSlow(
    MacroAssembler* masm, EmbedMode embed_mode,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);

  Factory* factory = masm->isolate()->factory();
  // Index is not a smi.
  __ bind(&index_not_smi_);
  // If index is a heap number, try converting it to an integer.
  __ CheckMap(index_,
              factory->heap_number_map(),
              index_not_number_,
              DONT_DO_SMI_CHECK);
  call_helper.BeforeCall(masm);
  if (embed_mode == PART_OF_IC_HANDLER) {
    // Preserve the IC's feedback vector and slot across the runtime call.
    __ Push(LoadWithVectorDescriptor::VectorRegister());
    __ Push(LoadDescriptor::SlotRegister());
  }
  __ Push(object_);
  __ Push(index_);  // Consumed by runtime conversion function.
  __ CallRuntime(Runtime::kNumberToSmi);
  if (!index_.is(rax)) {
    // Save the conversion result before the pop instructions below
    // have a chance to overwrite it.
    __ movp(index_, rax);
  }
  __ Pop(object_);
  if (embed_mode == PART_OF_IC_HANDLER) {
    __ Pop(LoadDescriptor::SlotRegister());
    __ Pop(LoadWithVectorDescriptor::VectorRegister());
  }
  // Reload the instance type.
  __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset));
  __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
  call_helper.AfterCall(masm);
  // If index is still not a smi, it must be out of range.
  __ JumpIfNotSmi(index_, index_out_of_range_);
  // Otherwise, return to the fast path.
  __ jmp(&got_smi_index_);

  // Call runtime. We get here when the receiver is a string and the
  // index is a number, but the code of getting the actual character
  // is too complex (e.g., when the string needs to be flattened).
  __ bind(&call_runtime_);
  call_helper.BeforeCall(masm);
  __ Push(object_);
  __ Integer32ToSmi(index_, index_);
  __ Push(index_);
  __ CallRuntime(Runtime::kStringCharCodeAtRT);
  if (!result_.is(rax)) {
    __ movp(result_, rax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
}
2065
2066
2067// -------------------------------------------------------------------------
2068// StringCharFromCodeGenerator
2069
// Emits the fast path mapping a character code (code_, a smi) to a
// single-character string (result_) via the single-character string cache
// root. Bails out to slow_case_ for non-smi codes, codes above
// String::kMaxOneByteCharCode, and cache misses (undefined cache entry);
// GenerateSlow jumps back to exit_ when done.
void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
  // Fast case of Heap::LookupSingleCharacterStringFromCode.
  __ JumpIfNotSmi(code_, &slow_case_);
  __ SmiCompare(code_, Smi::FromInt(String::kMaxOneByteCharCode));
  __ j(above, &slow_case_);

  // Index into the cache FixedArray with the (smi) character code.
  __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
  SmiIndex index = masm->SmiToIndex(kScratchRegister, code_, kPointerSizeLog2);
  __ movp(result_, FieldOperand(result_, index.reg, index.scale,
                                FixedArray::kHeaderSize));
  // An undefined entry means the string is not cached: fall back to slow.
  __ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
  __ j(equal, &slow_case_);
  __ bind(&exit_);
}
2084
2085
// Slow path for GenerateFast: calls Runtime::kStringCharFromCode with code_
// and moves the result into result_ before rejoining the fast path at exit_.
// Control only enters via slow_case_; the Abort()s trap any straight-line
// fall-through from/into the fast path. call_helper performs any frame
// setup/teardown required around the runtime call.
void StringCharFromCodeGenerator::GenerateSlow(
    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);

  __ bind(&slow_case_);
  call_helper.BeforeCall(masm);
  __ Push(code_);
  __ CallRuntime(Runtime::kStringCharFromCode);
  if (!result_.is(rax)) {
    // Runtime calls return in rax; move to the designated result register.
    __ movp(result_, rax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
}
2103
2104
2105void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
2106 Register dest,
2107 Register src,
2108 Register count,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002109 String::Encoding encoding) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002110 // Nothing to do for zero characters.
Ben Murdoch257744e2011-11-30 15:57:28 +00002111 Label done;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002112 __ testl(count, count);
Ben Murdoch257744e2011-11-30 15:57:28 +00002113 __ j(zero, &done, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002114
2115 // Make count the number of bytes to copy.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002116 if (encoding == String::TWO_BYTE_ENCODING) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002117 STATIC_ASSERT(2 == sizeof(uc16));
2118 __ addl(count, count);
2119 }
2120
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002121 // Copy remaining characters.
2122 Label loop;
2123 __ bind(&loop);
2124 __ movb(kScratchRegister, Operand(src, 0));
2125 __ movb(Operand(dest, 0), kScratchRegister);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002126 __ incp(src);
2127 __ incp(dest);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002128 __ decl(count);
2129 __ j(not_zero, &loop);
2130
2131 __ bind(&done);
2132}
2133
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002134
2135void SubStringStub::Generate(MacroAssembler* masm) {
2136 Label runtime;
2137
2138 // Stack frame on entry.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002139 // rsp[0] : return address
2140 // rsp[8] : to
2141 // rsp[16] : from
2142 // rsp[24] : string
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002143
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002144 enum SubStringStubArgumentIndices {
2145 STRING_ARGUMENT_INDEX,
2146 FROM_ARGUMENT_INDEX,
2147 TO_ARGUMENT_INDEX,
2148 SUB_STRING_ARGUMENT_COUNT
2149 };
2150
2151 StackArgumentsAccessor args(rsp, SUB_STRING_ARGUMENT_COUNT,
2152 ARGUMENTS_DONT_CONTAIN_RECEIVER);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002153
2154 // Make sure first argument is a string.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002155 __ movp(rax, args.GetArgumentOperand(STRING_ARGUMENT_INDEX));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002156 STATIC_ASSERT(kSmiTag == 0);
2157 __ testl(rax, Immediate(kSmiTagMask));
2158 __ j(zero, &runtime);
2159 Condition is_string = masm->IsObjectStringType(rax, rbx, rbx);
2160 __ j(NegateCondition(is_string), &runtime);
2161
2162 // rax: string
2163 // rbx: instance type
2164 // Calculate length of sub string using the smi values.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002165 __ movp(rcx, args.GetArgumentOperand(TO_ARGUMENT_INDEX));
2166 __ movp(rdx, args.GetArgumentOperand(FROM_ARGUMENT_INDEX));
Ben Murdochf87a2032010-10-22 12:50:53 +01002167 __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002168
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002169 __ SmiSub(rcx, rcx, rdx); // Overflow doesn't happen.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002170 __ cmpp(rcx, FieldOperand(rax, String::kLengthOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002171 Label not_original_string;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002172 // Shorter than original string's length: an actual substring.
2173 __ j(below, &not_original_string, Label::kNear);
2174 // Longer than original string's length or negative: unsafe arguments.
2175 __ j(above, &runtime);
2176 // Return original string.
2177 Counters* counters = isolate()->counters();
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002178 __ IncrementCounter(counters->sub_string_native(), 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002179 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002180 __ bind(&not_original_string);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002181
2182 Label single_char;
2183 __ SmiCompare(rcx, Smi::FromInt(1));
2184 __ j(equal, &single_char);
2185
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002186 __ SmiToInteger32(rcx, rcx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002187
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002188 // rax: string
2189 // rbx: instance type
2190 // rcx: sub string length
2191 // rdx: from index (smi)
2192 // Deal with different string types: update the index if necessary
2193 // and put the underlying string into edi.
2194 Label underlying_unpacked, sliced_string, seq_or_external_string;
2195 // If the string is not indirect, it can only be sequential or external.
2196 STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
2197 STATIC_ASSERT(kIsIndirectStringMask != 0);
2198 __ testb(rbx, Immediate(kIsIndirectStringMask));
2199 __ j(zero, &seq_or_external_string, Label::kNear);
2200
2201 __ testb(rbx, Immediate(kSlicedNotConsMask));
2202 __ j(not_zero, &sliced_string, Label::kNear);
2203 // Cons string. Check whether it is flat, then fetch first part.
2204 // Flat cons strings have an empty second part.
2205 __ CompareRoot(FieldOperand(rax, ConsString::kSecondOffset),
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002206 Heap::kempty_stringRootIndex);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002207 __ j(not_equal, &runtime);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002208 __ movp(rdi, FieldOperand(rax, ConsString::kFirstOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002209 // Update instance type.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002210 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002211 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002212 __ jmp(&underlying_unpacked, Label::kNear);
2213
2214 __ bind(&sliced_string);
2215 // Sliced string. Fetch parent and correct start index by offset.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002216 __ addp(rdx, FieldOperand(rax, SlicedString::kOffsetOffset));
2217 __ movp(rdi, FieldOperand(rax, SlicedString::kParentOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002218 // Update instance type.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002219 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002220 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
2221 __ jmp(&underlying_unpacked, Label::kNear);
2222
2223 __ bind(&seq_or_external_string);
2224 // Sequential or external string. Just move string to the correct register.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002225 __ movp(rdi, rax);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002226
2227 __ bind(&underlying_unpacked);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002228
Ben Murdoch589d6972011-11-30 16:04:58 +00002229 if (FLAG_string_slices) {
2230 Label copy_routine;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002231 // rdi: underlying subject string
2232 // rbx: instance type of underlying subject string
2233 // rdx: adjusted start index (smi)
2234 // rcx: length
Ben Murdoch589d6972011-11-30 16:04:58 +00002235 // If coming from the make_two_character_string path, the string
2236 // is too short to be sliced anyways.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002237 __ cmpp(rcx, Immediate(SlicedString::kMinLength));
Ben Murdoch589d6972011-11-30 16:04:58 +00002238 // Short slice. Copy instead of slicing.
2239 __ j(less, &copy_routine);
Ben Murdoch589d6972011-11-30 16:04:58 +00002240 // Allocate new sliced string. At this point we do not reload the instance
2241 // type including the string encoding because we simply rely on the info
2242 // provided by the original string. It does not matter if the original
2243 // string's encoding is wrong because we always have to recheck encoding of
2244 // the newly created string's parent anyways due to externalized strings.
2245 Label two_byte_slice, set_slice_header;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002246 STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
Ben Murdoch589d6972011-11-30 16:04:58 +00002247 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
2248 __ testb(rbx, Immediate(kStringEncodingMask));
2249 __ j(zero, &two_byte_slice, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002250 __ AllocateOneByteSlicedString(rax, rbx, r14, &runtime);
Ben Murdoch589d6972011-11-30 16:04:58 +00002251 __ jmp(&set_slice_header, Label::kNear);
2252 __ bind(&two_byte_slice);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002253 __ AllocateTwoByteSlicedString(rax, rbx, r14, &runtime);
Ben Murdoch589d6972011-11-30 16:04:58 +00002254 __ bind(&set_slice_header);
Ben Murdoch589d6972011-11-30 16:04:58 +00002255 __ Integer32ToSmi(rcx, rcx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002256 __ movp(FieldOperand(rax, SlicedString::kLengthOffset), rcx);
2257 __ movp(FieldOperand(rax, SlicedString::kHashFieldOffset),
Ben Murdoch589d6972011-11-30 16:04:58 +00002258 Immediate(String::kEmptyHashField));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002259 __ movp(FieldOperand(rax, SlicedString::kParentOffset), rdi);
2260 __ movp(FieldOperand(rax, SlicedString::kOffsetOffset), rdx);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002261 __ IncrementCounter(counters->sub_string_native(), 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002262 __ ret(3 * kPointerSize);
Ben Murdoch589d6972011-11-30 16:04:58 +00002263
2264 __ bind(&copy_routine);
Ben Murdoch589d6972011-11-30 16:04:58 +00002265 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002266
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002267 // rdi: underlying subject string
2268 // rbx: instance type of underlying subject string
2269 // rdx: adjusted start index (smi)
2270 // rcx: length
2271 // The subject string can only be external or sequential string of either
2272 // encoding at this point.
2273 Label two_byte_sequential, sequential_string;
2274 STATIC_ASSERT(kExternalStringTag != 0);
2275 STATIC_ASSERT(kSeqStringTag == 0);
2276 __ testb(rbx, Immediate(kExternalStringTag));
2277 __ j(zero, &sequential_string);
2278
2279 // Handle external string.
2280 // Rule out short external strings.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002281 STATIC_ASSERT(kShortExternalStringTag != 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002282 __ testb(rbx, Immediate(kShortExternalStringMask));
2283 __ j(not_zero, &runtime);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002284 __ movp(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002285 // Move the pointer so that offset-wise, it looks like a sequential string.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002286 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
2287 __ subp(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002288
2289 __ bind(&sequential_string);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002290 STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002291 __ testb(rbx, Immediate(kStringEncodingMask));
2292 __ j(zero, &two_byte_sequential);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002293
2294 // Allocate the result.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002295 __ AllocateOneByteString(rax, rcx, r11, r14, r15, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002296
2297 // rax: result string
2298 // rcx: result string length
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002299 { // Locate character of sub string start.
2300 SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002301 __ leap(r14, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
2302 SeqOneByteString::kHeaderSize - kHeapObjectTag));
Ben Murdoch85b71792012-04-11 18:30:58 +01002303 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002304 // Locate first character of result.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002305 __ leap(rdi, FieldOperand(rax, SeqOneByteString::kHeaderSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002306
2307 // rax: result string
2308 // rcx: result length
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002309 // r14: first character of result
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002310 // rsi: character of sub string start
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002311 StringHelper::GenerateCopyCharacters(
2312 masm, rdi, r14, rcx, String::ONE_BYTE_ENCODING);
Steve Block44f0eee2011-05-26 01:26:41 +01002313 __ IncrementCounter(counters->sub_string_native(), 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002314 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002315
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002316 __ bind(&two_byte_sequential);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002317 // Allocate the result.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002318 __ AllocateTwoByteString(rax, rcx, r11, r14, r15, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002319
2320 // rax: result string
2321 // rcx: result string length
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002322 { // Locate character of sub string start.
2323 SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002324 __ leap(r14, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
2325 SeqOneByteString::kHeaderSize - kHeapObjectTag));
Ben Murdoch85b71792012-04-11 18:30:58 +01002326 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002327 // Locate first character of result.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002328 __ leap(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002329
2330 // rax: result string
2331 // rcx: result length
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002332 // rdi: first character of result
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002333 // r14: character of sub string start
2334 StringHelper::GenerateCopyCharacters(
2335 masm, rdi, r14, rcx, String::TWO_BYTE_ENCODING);
Steve Block44f0eee2011-05-26 01:26:41 +01002336 __ IncrementCounter(counters->sub_string_native(), 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002337 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002338
2339 // Just jump to runtime to create the sub string.
2340 __ bind(&runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002341 __ TailCallRuntime(Runtime::kSubString);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002342
2343 __ bind(&single_char);
2344 // rax: string
2345 // rbx: instance type
2346 // rcx: sub string length (smi)
2347 // rdx: from index (smi)
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002348 StringCharAtGenerator generator(rax, rdx, rcx, rax, &runtime, &runtime,
Ben Murdoch61f157c2016-09-16 13:49:30 +01002349 &runtime, RECEIVER_IS_STRING);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002350 generator.GenerateFast(masm);
2351 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
2352 generator.SkipSlow(masm, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002353}
2354
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002355void ToStringStub::Generate(MacroAssembler* masm) {
2356 // The ToString stub takes one argument in rax.
2357 Label is_number;
2358 __ JumpIfSmi(rax, &is_number, Label::kNear);
2359
2360 Label not_string;
2361 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdi);
2362 // rax: receiver
2363 // rdi: receiver map
2364 __ j(above_equal, &not_string, Label::kNear);
2365 __ Ret();
2366 __ bind(&not_string);
2367
2368 Label not_heap_number;
2369 __ CompareRoot(rdi, Heap::kHeapNumberMapRootIndex);
2370 __ j(not_equal, &not_heap_number, Label::kNear);
2371 __ bind(&is_number);
2372 NumberToStringStub stub(isolate());
2373 __ TailCallStub(&stub);
2374 __ bind(&not_heap_number);
2375
2376 Label not_oddball;
2377 __ CmpInstanceType(rdi, ODDBALL_TYPE);
2378 __ j(not_equal, &not_oddball, Label::kNear);
2379 __ movp(rax, FieldOperand(rax, Oddball::kToStringOffset));
2380 __ Ret();
2381 __ bind(&not_oddball);
2382
2383 __ PopReturnAddressTo(rcx); // Pop return address.
2384 __ Push(rax); // Push argument.
2385 __ PushReturnAddressFrom(rcx); // Push return address.
2386 __ TailCallRuntime(Runtime::kToString);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002387}
2388
Ben Murdoch097c5b22016-05-18 11:27:45 +01002389void ToNameStub::Generate(MacroAssembler* masm) {
2390 // The ToName stub takes one argument in rax.
2391 Label is_number;
2392 __ JumpIfSmi(rax, &is_number, Label::kNear);
2393
2394 Label not_name;
2395 STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
2396 __ CmpObjectType(rax, LAST_NAME_TYPE, rdi);
2397 // rax: receiver
2398 // rdi: receiver map
2399 __ j(above, &not_name, Label::kNear);
2400 __ Ret();
2401 __ bind(&not_name);
2402
2403 Label not_heap_number;
2404 __ CompareRoot(rdi, Heap::kHeapNumberMapRootIndex);
2405 __ j(not_equal, &not_heap_number, Label::kNear);
2406 __ bind(&is_number);
2407 NumberToStringStub stub(isolate());
2408 __ TailCallStub(&stub);
2409 __ bind(&not_heap_number);
2410
2411 Label not_oddball;
2412 __ CmpInstanceType(rdi, ODDBALL_TYPE);
2413 __ j(not_equal, &not_oddball, Label::kNear);
2414 __ movp(rax, FieldOperand(rax, Oddball::kToStringOffset));
2415 __ Ret();
2416 __ bind(&not_oddball);
2417
2418 __ PopReturnAddressTo(rcx); // Pop return address.
2419 __ Push(rax); // Push argument.
2420 __ PushReturnAddressFrom(rcx); // Push return address.
2421 __ TailCallRuntime(Runtime::kToName);
2422}
2423
2424
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002425void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
2426 Register left,
2427 Register right,
2428 Register scratch1,
2429 Register scratch2) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002430 Register length = scratch1;
2431
2432 // Compare lengths.
2433 Label check_zero_length;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002434 __ movp(length, FieldOperand(left, String::kLengthOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00002435 __ SmiCompare(length, FieldOperand(right, String::kLengthOffset));
2436 __ j(equal, &check_zero_length, Label::kNear);
2437 __ Move(rax, Smi::FromInt(NOT_EQUAL));
2438 __ ret(0);
2439
2440 // Check if the length is zero.
2441 Label compare_chars;
2442 __ bind(&check_zero_length);
2443 STATIC_ASSERT(kSmiTag == 0);
2444 __ SmiTest(length);
2445 __ j(not_zero, &compare_chars, Label::kNear);
2446 __ Move(rax, Smi::FromInt(EQUAL));
2447 __ ret(0);
2448
2449 // Compare characters.
2450 __ bind(&compare_chars);
2451 Label strings_not_equal;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002452 GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2,
2453 &strings_not_equal, Label::kNear);
Ben Murdoch257744e2011-11-30 15:57:28 +00002454
2455 // Characters are equal.
2456 __ Move(rax, Smi::FromInt(EQUAL));
2457 __ ret(0);
2458
2459 // Characters are not equal.
2460 __ bind(&strings_not_equal);
2461 __ Move(rax, Smi::FromInt(NOT_EQUAL));
2462 __ ret(0);
2463}
2464
2465
// Emits code that lexicographically compares two flat one-byte strings and
// returns with a Smi result in rax: LESS, EQUAL or GREATER.
// NOTE: several conditional jumps below consume EFLAGS set by an earlier
// instruction (SmiSub / SmiTest / the byte compare inside the character
// loop) -- the instruction order is load-bearing.
void StringHelper::GenerateCompareFlatOneByteStrings(
    MacroAssembler* masm, Register left, Register right, Register scratch1,
    Register scratch2, Register scratch3, Register scratch4) {
  // Ensure that you can always subtract a string length from a non-negative
  // number (e.g. another length).
  STATIC_ASSERT(String::kMaxLength < 0x7fffffff);

  // Find minimum length and length difference.
  __ movp(scratch1, FieldOperand(left, String::kLengthOffset));
  __ movp(scratch4, scratch1);
  __ SmiSub(scratch4,
            scratch4,
            FieldOperand(right, String::kLengthOffset));
  // Register scratch4 now holds left.length - right.length.
  const Register length_difference = scratch4;
  Label left_shorter;
  // Branches on the flags of the SmiSub above.
  __ j(less, &left_shorter, Label::kNear);
  // The right string isn't longer that the left one.
  // Get the right string's length by subtracting the (non-negative) difference
  // from the left string's length.
  __ SmiSub(scratch1, scratch1, length_difference);
  __ bind(&left_shorter);
  // Register scratch1 now holds Min(left.length, right.length).
  const Register min_length = scratch1;

  Label compare_lengths;
  // If min-length is zero, go directly to comparing lengths.
  __ SmiTest(min_length);
  __ j(zero, &compare_lengths, Label::kNear);

  // Compare loop. On a mismatch it jumps to result_not_equal with the flags
  // of the differing byte compare still live.
  Label result_not_equal;
  GenerateOneByteCharsCompareLoop(
      masm, left, right, min_length, scratch2, &result_not_equal,
      // In debug-code mode, SmiTest below might push
      // the target label outside the near range.
      Label::kFar);

  // Completed loop without finding different characters.
  // Compare lengths (precomputed).
  __ bind(&compare_lengths);
  __ SmiTest(length_difference);
  Label length_not_equal;
  __ j(not_zero, &length_not_equal, Label::kNear);

  // Result is EQUAL.
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  Label result_greater;
  Label result_less;
  __ bind(&length_not_equal);
  // Flags still reflect SmiTest(length_difference): a positive difference
  // means the left string is longer, hence GREATER.
  __ j(greater, &result_greater, Label::kNear);
  __ jmp(&result_less, Label::kNear);
  __ bind(&result_not_equal);
  // Unequal comparison of left to right, either character or length.
  // Unsigned compare here, since the character loop compared raw bytes.
  __ j(above, &result_greater, Label::kNear);
  __ bind(&result_less);

  // Result is LESS.
  __ Move(rax, Smi::FromInt(LESS));
  __ ret(0);

  // Result is GREATER.
  __ bind(&result_greater);
  __ Move(rax, Smi::FromInt(GREATER));
  __ ret(0);
}
2534
2535
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002536void StringHelper::GenerateOneByteCharsCompareLoop(
2537 MacroAssembler* masm, Register left, Register right, Register length,
2538 Register scratch, Label* chars_not_equal, Label::Distance near_jump) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002539 // Change index to run from -length to -1 by adding length to string
2540 // start. This means that loop ends when index reaches zero, which
2541 // doesn't need an additional compare.
2542 __ SmiToInteger32(length, length);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002543 __ leap(left,
2544 FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
2545 __ leap(right,
2546 FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
2547 __ negq(length);
Ben Murdoch257744e2011-11-30 15:57:28 +00002548 Register index = length; // index = -length;
2549
2550 // Compare loop.
2551 Label loop;
2552 __ bind(&loop);
2553 __ movb(scratch, Operand(left, index, times_1, 0));
2554 __ cmpb(scratch, Operand(right, index, times_1, 0));
2555 __ j(not_equal, chars_not_equal, near_jump);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002556 __ incq(index);
Ben Murdoch257744e2011-11-30 15:57:28 +00002557 __ j(not_zero, &loop);
2558}
2559
2560
// Trampoline that materializes the stub's allocation site in rcx and then
// tail-calls the stub that performs the actual binary operation.
// NOTE: the Move immediate below is a patch site; its exact position in the
// instruction stream matters, so the code is left byte-identical.
void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdx    : left
  //  -- rax    : right
  //  -- rsp[0] : return address
  // -----------------------------------

  // Load rcx with the allocation site. We stick an undefined dummy value here
  // and replace it with the real allocation site later when we instantiate this
  // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
  __ Move(rcx, isolate()->factory()->undefined_value());

  // Make sure that we actually patched the allocation site: it must be a
  // heap object (not a Smi) whose map is the allocation-site map.
  if (FLAG_debug_code) {
    __ testb(rcx, Immediate(kSmiTagMask));
    __ Assert(not_equal, kExpectedAllocationSite);
    __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset),
           isolate()->factory()->allocation_site_map());
    __ Assert(equal, kExpectedAllocationSite);
  }

  // Tail call into the stub that handles binary operations with allocation
  // sites.
  BinaryOpWithAllocationSiteStub stub(isolate(), state());
  __ TailCallStub(&stub);
}
2587
2588
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002589void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
2590 DCHECK_EQ(CompareICState::BOOLEAN, state());
2591 Label miss;
2592 Label::Distance const miss_distance =
2593 masm->emit_debug_code() ? Label::kFar : Label::kNear;
2594
2595 __ JumpIfSmi(rdx, &miss, miss_distance);
2596 __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
2597 __ JumpIfSmi(rax, &miss, miss_distance);
2598 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2599 __ JumpIfNotRoot(rcx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
2600 __ JumpIfNotRoot(rbx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002601 if (!Token::IsEqualityOp(op())) {
2602 __ movp(rax, FieldOperand(rax, Oddball::kToNumberOffset));
2603 __ AssertSmi(rax);
2604 __ movp(rdx, FieldOperand(rdx, Oddball::kToNumberOffset));
2605 __ AssertSmi(rdx);
2606 __ pushq(rax);
2607 __ movq(rax, rdx);
2608 __ popq(rdx);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002609 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01002610 __ subp(rax, rdx);
2611 __ Ret();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002612
2613 __ bind(&miss);
2614 GenerateMiss(masm);
2615}
2616
2617
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002618void CompareICStub::GenerateSmis(MacroAssembler* masm) {
2619 DCHECK(state() == CompareICState::SMI);
Ben Murdoch257744e2011-11-30 15:57:28 +00002620 Label miss;
2621 __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear);
Steve Block1e0659c2011-05-24 12:43:12 +01002622
2623 if (GetCondition() == equal) {
2624 // For equality we do not care about the sign of the result.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002625 __ subp(rax, rdx);
Steve Block1e0659c2011-05-24 12:43:12 +01002626 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00002627 Label done;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002628 __ subp(rdx, rax);
Ben Murdoch257744e2011-11-30 15:57:28 +00002629 __ j(no_overflow, &done, Label::kNear);
Steve Block1e0659c2011-05-24 12:43:12 +01002630 // Correct sign of result in case of overflow.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002631 __ notp(rdx);
Steve Block1e0659c2011-05-24 12:43:12 +01002632 __ bind(&done);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002633 __ movp(rax, rdx);
Steve Block1e0659c2011-05-24 12:43:12 +01002634 }
2635 __ ret(0);
2636
2637 __ bind(&miss);
2638 GenerateMiss(masm);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002639}
2640
2641
// Compares two number operands (rdx = left, rax = right). Each operand may
// be a Smi or a heap number depending on the recorded state; the values are
// loaded into xmm0 (left) / xmm1 (right) and compared with ucomisd.
// Returns -1, 0 or 1 in rax, or falls back to the generic stub / miss
// handler. NOTE: the setcc/sbb sequence at the end decodes EFLAGS without
// clobbering them mid-sequence -- the instruction order is load-bearing.
void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::NUMBER);

  Label generic_stub;
  Label unordered, maybe_undefined1, maybe_undefined2;
  Label miss;

  // If the recorded state says an operand is always a Smi, enforce it.
  if (left() == CompareICState::SMI) {
    __ JumpIfNotSmi(rdx, &miss);
  }
  if (right() == CompareICState::SMI) {
    __ JumpIfNotSmi(rax, &miss);
  }

  // Load left and right operand.
  Label done, left, left_smi, right_smi;
  __ JumpIfSmi(rax, &right_smi, Label::kNear);
  __ CompareMap(rax, isolate()->factory()->heap_number_map());
  __ j(not_equal, &maybe_undefined1, Label::kNear);
  __ Movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&left, Label::kNear);
  __ bind(&right_smi);
  __ SmiToInteger32(rcx, rax);  // Can't clobber rax yet.
  __ Cvtlsi2sd(xmm1, rcx);

  __ bind(&left);
  __ JumpIfSmi(rdx, &left_smi, Label::kNear);
  __ CompareMap(rdx, isolate()->factory()->heap_number_map());
  __ j(not_equal, &maybe_undefined2, Label::kNear);
  __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  __ jmp(&done);
  __ bind(&left_smi);
  __ SmiToInteger32(rcx, rdx);  // Can't clobber rdx yet.
  __ Cvtlsi2sd(xmm0, rcx);

  __ bind(&done);
  // Compare operands
  __ Ucomisd(xmm0, xmm1);

  // Don't base result on EFLAGS when a NaN is involved.
  __ j(parity_even, &unordered, Label::kNear);

  // Return a result of -1, 0, or 1, based on EFLAGS.
  // Performing mov, because xor would destroy the flag register.
  __ movl(rax, Immediate(0));
  __ movl(rcx, Immediate(0));
  __ setcc(above, rax);  // Add one to zero if carry clear and not equal.
  __ sbbp(rax, rcx);  // Subtract one if below (aka. carry set).
  __ ret(0);

  // Unordered (NaN) comparisons are handled by the generic stub.
  __ bind(&unordered);
  __ bind(&generic_stub);
  CompareICStub stub(isolate(), op(), CompareICState::GENERIC,
                     CompareICState::GENERIC, CompareICState::GENERIC);
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);

  // The right operand was not a heap number. For ordered relational
  // compares, undefined compares like NaN, so route it to &unordered.
  __ bind(&maybe_undefined1);
  if (Token::IsOrderedRelationalCompareOp(op())) {
    __ Cmp(rax, isolate()->factory()->undefined_value());
    __ j(not_equal, &miss);
    __ JumpIfSmi(rdx, &unordered);
    __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
    __ j(not_equal, &maybe_undefined2, Label::kNear);
    __ jmp(&unordered);
  }

  // Same for the left operand.
  __ bind(&maybe_undefined2);
  if (Token::IsOrderedRelationalCompareOp(op())) {
    __ Cmp(rdx, isolate()->factory()->undefined_value());
    __ j(equal, &unordered);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}
2717
2718
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002719void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
2720 DCHECK(state() == CompareICState::INTERNALIZED_STRING);
2721 DCHECK(GetCondition() == equal);
Ben Murdoch257744e2011-11-30 15:57:28 +00002722
2723 // Registers containing left and right operands respectively.
2724 Register left = rdx;
2725 Register right = rax;
2726 Register tmp1 = rcx;
2727 Register tmp2 = rbx;
2728
2729 // Check that both operands are heap objects.
2730 Label miss;
2731 Condition cond = masm->CheckEitherSmi(left, right, tmp1);
2732 __ j(cond, &miss, Label::kNear);
2733
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002734 // Check that both operands are internalized strings.
2735 __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
2736 __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
2737 __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
2738 __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
2739 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
2740 __ orp(tmp1, tmp2);
2741 __ testb(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
2742 __ j(not_zero, &miss, Label::kNear);
Ben Murdoch257744e2011-11-30 15:57:28 +00002743
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002744 // Internalized strings are compared by identity.
Ben Murdoch257744e2011-11-30 15:57:28 +00002745 Label done;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002746 __ cmpp(left, right);
Ben Murdoch257744e2011-11-30 15:57:28 +00002747 // Make sure rax is non-zero. At this point input operands are
2748 // guaranteed to be non-zero.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002749 DCHECK(right.is(rax));
Ben Murdoch257744e2011-11-30 15:57:28 +00002750 __ j(not_equal, &done, Label::kNear);
2751 STATIC_ASSERT(EQUAL == 0);
2752 STATIC_ASSERT(kSmiTag == 0);
2753 __ Move(rax, Smi::FromInt(EQUAL));
2754 __ bind(&done);
2755 __ ret(0);
2756
2757 __ bind(&miss);
2758 GenerateMiss(masm);
2759}
2760
2761
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002762void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
2763 DCHECK(state() == CompareICState::UNIQUE_NAME);
2764 DCHECK(GetCondition() == equal);
2765
2766 // Registers containing left and right operands respectively.
2767 Register left = rdx;
2768 Register right = rax;
2769 Register tmp1 = rcx;
2770 Register tmp2 = rbx;
2771
2772 // Check that both operands are heap objects.
2773 Label miss;
2774 Condition cond = masm->CheckEitherSmi(left, right, tmp1);
2775 __ j(cond, &miss, Label::kNear);
2776
2777 // Check that both operands are unique names. This leaves the instance
2778 // types loaded in tmp1 and tmp2.
2779 __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
2780 __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
2781 __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
2782 __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
2783
2784 __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear);
2785 __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear);
2786
2787 // Unique names are compared by identity.
2788 Label done;
2789 __ cmpp(left, right);
2790 // Make sure rax is non-zero. At this point input operands are
2791 // guaranteed to be non-zero.
2792 DCHECK(right.is(rax));
2793 __ j(not_equal, &done, Label::kNear);
2794 STATIC_ASSERT(EQUAL == 0);
2795 STATIC_ASSERT(kSmiTag == 0);
2796 __ Move(rax, Smi::FromInt(EQUAL));
2797 __ bind(&done);
2798 __ ret(0);
2799
2800 __ bind(&miss);
2801 GenerateMiss(masm);
2802}
2803
2804
// Compares two string operands (rdx = left, rax = right). Handles identical
// pointers, both-internalized, and flat one-byte strings inline; everything
// else goes to the runtime. The emitted code differs for equality vs.
// relational compares (`equality` below), so the statement order and the
// conditional emission are load-bearing.
void CompareICStub::GenerateStrings(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::STRING);
  Label miss;

  bool equality = Token::IsEqualityOp(op());

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;
  Register tmp3 = rdi;

  // Check that both operands are heap objects.
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss);

  // Check that both operands are strings. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  __ movp(tmp3, tmp1);
  STATIC_ASSERT(kNotStringTag != 0);
  __ orp(tmp3, tmp2);
  __ testb(tmp3, Immediate(kIsNotStringMask));
  __ j(not_zero, &miss);

  // Fast check for identical strings.
  Label not_same;
  __ cmpp(left, right);
  __ j(not_equal, &not_same, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  // Handle not identical strings.
  __ bind(&not_same);

  // Check that both strings are internalized strings. If they are, we're done
  // because we already know they are not identical. We also know they are both
  // strings.
  if (equality) {
    Label do_compare;
    STATIC_ASSERT(kInternalizedTag == 0);
    __ orp(tmp1, tmp2);
    __ testb(tmp1, Immediate(kIsNotInternalizedMask));
    __ j(not_zero, &do_compare, Label::kNear);
    // Make sure rax is non-zero. At this point input operands are
    // guaranteed to be non-zero.
    DCHECK(right.is(rax));
    __ ret(0);
    __ bind(&do_compare);
  }

  // Check that both strings are sequential one-byte.
  Label runtime;
  __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime);

  // Compare flat one-byte strings. Returns when done.
  if (equality) {
    StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1,
                                                  tmp2);
  } else {
    StringHelper::GenerateCompareFlatOneByteStrings(
        masm, left, right, tmp1, tmp2, tmp3, kScratchRegister);
  }

  // Handle more complex cases in runtime.
  __ bind(&runtime);
  if (equality) {
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(left);
      __ Push(right);
      __ CallRuntime(Runtime::kStringEqual);
    }
    // The runtime call returns true/false in rax; subtracting the true
    // root makes the result zero exactly when the strings were equal.
    __ LoadRoot(rdx, Heap::kTrueValueRootIndex);
    __ subp(rax, rdx);
    __ Ret();
  } else {
    // Relational compare: tail-call with both operands under the return
    // address on the stack.
    __ PopReturnAddressTo(tmp1);
    __ Push(left);
    __ Push(right);
    __ PushReturnAddressFrom(tmp1);
    __ TailCallRuntime(Runtime::kStringCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}
2898
2899
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002900void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
2901 DCHECK_EQ(CompareICState::RECEIVER, state());
Ben Murdoch257744e2011-11-30 15:57:28 +00002902 Label miss;
Steve Block1e0659c2011-05-24 12:43:12 +01002903 Condition either_smi = masm->CheckEitherSmi(rdx, rax);
Ben Murdoch257744e2011-11-30 15:57:28 +00002904 __ j(either_smi, &miss, Label::kNear);
Steve Block1e0659c2011-05-24 12:43:12 +01002905
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002906 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
2907 __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
2908 __ j(below, &miss, Label::kNear);
2909 __ CmpObjectType(rdx, FIRST_JS_RECEIVER_TYPE, rcx);
2910 __ j(below, &miss, Label::kNear);
Steve Block1e0659c2011-05-24 12:43:12 +01002911
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002912 DCHECK_EQ(equal, GetCondition());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002913 __ subp(rax, rdx);
Steve Block1e0659c2011-05-24 12:43:12 +01002914 __ ret(0);
2915
2916 __ bind(&miss);
2917 GenerateMiss(masm);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002918}
2919
2920
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002921void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002922 Label miss;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002923 Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002924 Condition either_smi = masm->CheckEitherSmi(rdx, rax);
2925 __ j(either_smi, &miss, Label::kNear);
2926
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002927 __ GetWeakValue(rdi, cell);
2928 __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rdi);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002929 __ j(not_equal, &miss, Label::kNear);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002930 __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rdi);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002931 __ j(not_equal, &miss, Label::kNear);
2932
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002933 if (Token::IsEqualityOp(op())) {
2934 __ subp(rax, rdx);
2935 __ ret(0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002936 } else {
2937 __ PopReturnAddressTo(rcx);
2938 __ Push(rdx);
2939 __ Push(rax);
2940 __ Push(Smi::FromInt(NegativeComparisonResult(GetCondition())));
2941 __ PushReturnAddressFrom(rcx);
2942 __ TailCallRuntime(Runtime::kCompare);
2943 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002944
2945 __ bind(&miss);
2946 GenerateMiss(masm);
2947}
2948
2949
// Miss handler shared by all CompareIC states: calls the runtime to compute
// a better stub for the observed operands, then tail-calls the new code.
// The operands are pushed twice on purpose -- one pair (plus the op Smi)
// forms the runtime call's arguments, the other pair is restored by the
// Pops below so the new stub sees the original rdx/rax.
void CompareICStub::GenerateMiss(MacroAssembler* masm) {
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rdx);
    __ Push(rax);
    __ Push(rdx);
    __ Push(rax);
    __ Push(Smi::FromInt(op()));
    __ CallRuntime(Runtime::kCompareIC_Miss);

    // Compute the entry point of the rewritten stub (the runtime returned
    // its Code object in rax).
    __ leap(rdi, FieldOperand(rax, Code::kHeaderSize));
    // Restore the original operands before leaving the frame.
    __ Pop(rax);
    __ Pop(rdx);
  }

  // Do a tail call to the rewritten stub.
  __ jmp(rdi);
}
2970
Steve Block1e0659c2011-05-24 12:43:12 +01002971
// Proves that |name| is absent from the property dictionary in |properties|.
// Inlines the first kInlinedProbes probes; if still inconclusive, calls the
// NEGATIVE_LOOKUP stub. Jumps to |done| when the name is proven absent and
// to |miss| when it may be present. |r0| is clobbered as scratch.
void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register properties,
                                                      Handle<Name> name,
                                                      Register r0) {
  DCHECK(name->IsUniqueName());
  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the hole value).
  for (int i = 0; i < kInlinedProbes; i++) {
    // r0 points to properties hash.
    // Compute the masked index: (hash + i + i * i) & mask.
    Register index = r0;
    // Capacity is smi 2^n, so capacity - 1 is the probe mask.
    __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset));
    __ decl(index);
    // The probe offset is folded into the constant at code-generation time
    // because the hash of |name| is known here.
    __ andp(index,
            Immediate(name->Hash() + NameDictionary::GetProbeOffset(i)));

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ leap(index, Operand(index, index, times_2, 0));  // index *= 3.

    Register entity_name = r0;
    // Having undefined at this place means the name is not contained.
    STATIC_ASSERT(kSmiTagSize == 1);
    __ movp(entity_name, Operand(properties,
                                 index,
                                 times_pointer_size,
                                 kElementsStartOffset - kHeapObjectTag));
    __ Cmp(entity_name, masm->isolate()->factory()->undefined_value());
    __ j(equal, done);

    // Stop if found the property.
    __ Cmp(entity_name, Handle<Name>(name));
    __ j(equal, miss);

    Label good;
    // Check for the hole and skip: a deleted entry cannot be the name.
    __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex);
    __ j(equal, &good, Label::kNear);

    // Check if the entry name is not a unique name. A non-unique key could
    // still be equal to |name|, so we must go to the miss path.
    __ movp(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
    __ JumpIfNotUniqueNameInstanceType(
        FieldOperand(entity_name, Map::kInstanceTypeOffset), miss);
    __ bind(&good);
  }

  // Inlined probes were inconclusive: fall back to the full lookup stub,
  // passing key and hash on the stack.
  NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
                                NEGATIVE_LOOKUP);
  __ Push(Handle<Object>(name));
  __ Push(Immediate(name->Hash()));
  __ CallStub(&stub);
  // The stub returns non-zero in r0 when the name may be in the dictionary.
  __ testp(r0, r0);
  __ j(not_zero, miss);
  __ jmp(done);
}
3033
3034
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003035// Probe the name dictionary in the |elements| register. Jump to the
Ben Murdoch257744e2011-11-30 15:57:28 +00003036// |done| label if a property with the given name is found leaving the
3037// index into the dictionary in |r1|. Jump to the |miss| label
3038// otherwise.
// Probes the name dictionary in |elements| for |name| (hash read at run
// time, unlike the negative lookup which bakes it in). Inlines the first
// kInlinedProbes probes, then falls back to the POSITIVE_LOOKUP stub.
// On |done| the entry index is left in |r1|; jumps to |miss| otherwise.
// |r0| and |r1| are clobbered as scratch.
void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register elements,
                                                      Register name,
                                                      Register r0,
                                                      Register r1) {
  DCHECK(!elements.is(r0));
  DCHECK(!elements.is(r1));
  DCHECK(!name.is(r0));
  DCHECK(!name.is(r1));

  __ AssertName(name);

  // r0 = capacity - 1, the probe mask (capacity is a power-of-two smi).
  __ SmiToInteger32(r0, FieldOperand(elements, kCapacityOffset));
  __ decl(r0);

  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ movl(r1, FieldOperand(name, Name::kHashFieldOffset));
    __ shrl(r1, Immediate(Name::kHashShift));
    if (i > 0) {
      __ addl(r1, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ andp(r1, r0);

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ leap(r1, Operand(r1, r1, times_2, 0));  // r1 = r1 * 3

    // Check if the key is identical to the name.
    __ cmpp(name, Operand(elements, r1, times_pointer_size,
                          kElementsStartOffset - kHeapObjectTag));
    __ j(equal, done);
  }

  // Inlined probes missed: call the full lookup stub with the key and its
  // hash pushed on the stack.
  NameDictionaryLookupStub stub(masm->isolate(), elements, r0, r1,
                                POSITIVE_LOOKUP);
  __ Push(name);
  __ movl(r0, FieldOperand(name, Name::kHashFieldOffset));
  __ shrl(r0, Immediate(Name::kHashShift));
  __ Push(r0);
  __ CallStub(&stub);

  // Zero in r0 means the stub did not find the name.
  __ testp(r0, r0);
  __ j(zero, miss);
  __ jmp(done);
}
3087
3088
// Out-of-line continuation of the dictionary lookup: runs the remaining
// probes (kInlinedProbes..kTotalProbes). Reads key and hash from the stack
// and leaves a boolean result (0/1) in result_.
void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
  // This stub overrides SometimesSetsUpAFrame() to return false. That means
  // we cannot call anything that could cause a GC from this stub.
  // Stack frame on entry:
  //  rsp[0 * kPointerSize] : return address.
  //  rsp[1 * kPointerSize] : key's hash.
  //  rsp[2 * kPointerSize] : key.
  // Registers:
  //  dictionary_: NameDictionary to probe.
  //  result_: used as scratch.
  //  index_: will hold an index of entry if lookup is successful.
  //          might alias with result_.
  // Returns:
  //  result_ is zero if lookup failed, non zero otherwise.

  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;

  Register scratch = result();

  // Compute capacity - 1 (the probe mask) and keep it at rsp[0] for the
  // whole probe loop.
  __ SmiToInteger32(scratch, FieldOperand(dictionary(), kCapacityOffset));
  __ decl(scratch);
  __ Push(scratch);

  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the null value).
  // Note: the extra kPointerSize accounts for the mask pushed above.
  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER,
                              kPointerSize);
  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ movp(scratch, args.GetArgumentOperand(1));  // key's hash.
    if (i > 0) {
      __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ andp(scratch, Operand(rsp, 0));  // Apply the capacity mask.

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ leap(index(), Operand(scratch, scratch, times_2, 0));  // index *= 3.

    // Having undefined at this place means the name is not contained.
    __ movp(scratch, Operand(dictionary(), index(), times_pointer_size,
                             kElementsStartOffset - kHeapObjectTag));

    __ Cmp(scratch, isolate()->factory()->undefined_value());
    __ j(equal, &not_in_dictionary);

    // Stop if found the property.
    __ cmpp(scratch, args.GetArgumentOperand(0));  // key.
    __ j(equal, &in_dictionary);

    if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
      // If we hit a key that is not a unique name during negative
      // lookup we have to bailout as this key might be equal to the
      // key we are looking for.

      // Check if the entry name is not a unique name.
      __ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
      __ JumpIfNotUniqueNameInstanceType(
          FieldOperand(scratch, Map::kInstanceTypeOffset),
          &maybe_in_dictionary);
    }
  }

  __ bind(&maybe_in_dictionary);
  // If we are doing negative lookup then probing failure should be
  // treated as a lookup success. For positive lookup probing failure
  // should be treated as lookup failure.
  if (mode() == POSITIVE_LOOKUP) {
    __ movp(scratch, Immediate(0));
    __ Drop(1);  // Discard the capacity mask.
    __ ret(2 * kPointerSize);
  }
  // NEGATIVE_LOOKUP falls through to &in_dictionary: report "maybe present".

  __ bind(&in_dictionary);
  __ movp(scratch, Immediate(1));
  __ Drop(1);  // Discard the capacity mask.
  __ ret(2 * kPointerSize);

  __ bind(&not_in_dictionary);
  __ movp(scratch, Immediate(0));
  __ Drop(1);  // Discard the capacity mask.
  __ ret(2 * kPointerSize);
}
3175
3176
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003177void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
3178 Isolate* isolate) {
3179 StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
3180 stub1.GetCode();
3181 StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
3182 stub2.GetCode();
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003183}
3184
3185
// Takes the input in 3 registers: address_, value_ and object_.  A pointer to
// the value has just been written into the object, now this stub makes sure
// we keep the GC informed.  The word in the object where the value has been
// written is in the address register.
void RecordWriteStub::Generate(MacroAssembler* masm) {
  Label skip_to_incremental_noncompacting;
  Label skip_to_incremental_compacting;

  // The first two instructions are generated with labels so as to get the
  // offset fixed up correctly by the bind(Label*) call.  We patch it back and
  // forth between a compare instructions (a nop in this position) and the
  // real branch when we start and stop incremental heap marking.
  // See RecordWriteStub::Patch for details.
  __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
  __ jmp(&skip_to_incremental_compacting, Label::kFar);

  // Initial (STORE_BUFFER_ONLY) body: just record the slot, or return.
  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&skip_to_incremental_noncompacting);
  GenerateIncremental(masm, INCREMENTAL);

  __ bind(&skip_to_incremental_compacting);
  GenerateIncremental(masm, INCREMENTAL_COMPACTION);

  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
  // Will be checked in IncrementalMarking::ActivateGeneratedStub.
  // Overwrite the two jumps emitted above with nops of matching sizes so
  // that the stub starts out on the fast (store-buffer-only) path.
  masm->set_byte_at(0, kTwoByteNopInstruction);
  masm->set_byte_at(2, kFiveByteNopInstruction);
}
3220
3221
// Body of the record-write stub used while incremental marking is active
// (with or without compaction). Notifies the incremental marker and, if
// required, updates the remembered set.
void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
  regs_.Save(masm);

  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    Label dont_need_remembered_set;

    // Load the stored value and skip remembered-set work when neither the
    // value nor the object lives in new space.
    __ movp(regs_.scratch0(), Operand(regs_.address(), 0));
    __ JumpIfNotInNewSpace(regs_.scratch0(),
                           regs_.scratch0(),
                           &dont_need_remembered_set);

    __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(),
                        &dont_need_remembered_set);

    // First notify the incremental marker if necessary, then update the
    // remembered set.
    CheckNeedsToInformIncrementalMarker(
        masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
    InformIncrementalMarker(masm);
    regs_.Restore(masm);
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);

    __ bind(&dont_need_remembered_set);
  }

  // Only the marker needs to be informed on this path.
  CheckNeedsToInformIncrementalMarker(
      masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
  InformIncrementalMarker(masm);
  regs_.Restore(masm);
  __ ret(0);
}
3254
3255
// Calls the C++ incremental-marking record-write function with
// (object, slot address, isolate), preserving caller-save registers.
void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
  // If the address register is also the first C argument register, stash the
  // address in kScratchRegister first so it is not clobbered by the move of
  // the object into arg_reg_1 below.
  Register address =
      arg_reg_1.is(regs_.address()) ? kScratchRegister : regs_.address();
  DCHECK(!address.is(regs_.object()));
  DCHECK(!address.is(arg_reg_1));
  __ Move(address, regs_.address());
  __ Move(arg_reg_1, regs_.object());
  // TODO(gc) Can we just set address arg2 in the beginning?
  __ Move(arg_reg_2, address);
  __ LoadAddress(arg_reg_3,
                 ExternalReference::isolate_address(isolate()));
  int argument_count = 3;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count);
  __ CallCFunction(
      ExternalReference::incremental_marking_record_write_function(isolate()),
      argument_count);
  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
}
3277
3278
// Decides whether the incremental marker must be informed about this write.
// Returns (or updates the remembered set and returns, per |on_no_need|)
// when no notification is needed; otherwise falls through so the caller can
// emit the InformIncrementalMarker call.
void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    MacroAssembler* masm,
    OnNoNeedToInformIncrementalMarker on_no_need,
    Mode mode) {
  Label on_black;
  Label need_incremental;
  Label need_incremental_pop_object;

  // Locate the MemoryChunk header of the object's page and decrement its
  // write-barrier counter; when it goes negative we must take the slow path.
  __ movp(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
  __ andp(regs_.scratch0(), regs_.object());
  __ movp(regs_.scratch1(),
         Operand(regs_.scratch0(),
                 MemoryChunk::kWriteBarrierCounterOffset));
  __ subp(regs_.scratch1(), Immediate(1));
  __ movp(Operand(regs_.scratch0(),
                 MemoryChunk::kWriteBarrierCounterOffset),
         regs_.scratch1());
  __ j(negative, &need_incremental);

  // Let's look at the color of the object:  If it is not black we don't have
  // to inform the incremental marker.
  __ JumpIfBlack(regs_.object(),
                 regs_.scratch0(),
                 regs_.scratch1(),
                 &on_black,
                 Label::kNear);

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&on_black);

  // Get the value from the slot.
  __ movp(regs_.scratch0(), Operand(regs_.address(), 0));

  if (mode == INCREMENTAL_COMPACTION) {
    Label ensure_not_white;

    // A value on an evacuation-candidate page needs the slow path unless
    // slot recording is being skipped for the object's page.
    __ CheckPageFlag(regs_.scratch0(),  // Contains value.
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kEvacuationCandidateMask,
                     zero,
                     &ensure_not_white,
                     Label::kNear);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kSkipEvacuationSlotsRecordingMask,
                     zero,
                     &need_incremental);

    __ bind(&ensure_not_white);
  }

  // We need an extra register for this, so we push the object register
  // temporarily.
  __ Push(regs_.object());
  __ JumpIfWhite(regs_.scratch0(),  // The value.
                 regs_.scratch1(),  // Scratch.
                 regs_.object(),    // Scratch.
                 &need_incremental_pop_object, Label::kNear);
  __ Pop(regs_.object());

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&need_incremental_pop_object);
  __ Pop(regs_.object());

  __ bind(&need_incremental);

  // Fall through when we need to inform the incremental marker.
}
3362
3363
// Trampoline used when a stub deoptimizes: calls the miss handler via
// CEntryStub, then unwinds the stub-failure frame, dropping the stack
// arguments (count in rbx) before returning to the IC miss continuation.
void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(isolate(), 1, kSaveFPRegs);
  __ Call(ces.GetCode(), RelocInfo::CODE_TARGET);
  int parameter_count_offset =
      StubFailureTrampolineFrameConstants::kArgumentsLengthOffset;
  __ movp(rbx, MemOperand(rbp, parameter_count_offset));
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  __ PopReturnAddressTo(rcx);
  // In JS_FUNCTION_STUB_MODE one extra slot (the receiver) must be dropped.
  int additional_offset =
      function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0;
  __ leap(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset));
  __ jmp(rcx);  // Return to IC Miss stub, continuation still on stack.
}
3377
3378
3379void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003380 __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
Ben Murdoch61f157c2016-09-16 13:49:30 +01003381 LoadICStub stub(isolate());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003382 stub.GenerateForTrampoline(masm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003383}
3384
3385
3386void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003387 __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
Ben Murdoch61f157c2016-09-16 13:49:30 +01003388 KeyedLoadICStub stub(isolate());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003389 stub.GenerateForTrampoline(masm);
3390}
3391
3392
// Walks a feedback FixedArray of (weak map cell, handler) pairs and jumps to
// the handler whose map matches |receiver_map|. Slot 0/1 is the fast
// (likely monomorphic) pair; slots 2..N-1 are scanned in a loop. Jumps to
// |miss| when no map matches (or, when !is_polymorphic, when the array
// holds only the single pair).
static void HandleArrayCases(MacroAssembler* masm, Register feedback,
                             Register receiver_map, Register scratch1,
                             Register scratch2, Register scratch3,
                             bool is_polymorphic, Label* miss) {
  // feedback initially contains the feedback array
  Label next_loop, prepare_next;
  Label start_polymorphic;

  Register counter = scratch1;
  Register length = scratch2;
  Register cached_map = scratch3;

  // Fast case: compare against the first cached map.
  __ movp(cached_map, FieldOperand(feedback, FixedArray::OffsetOfElementAt(0)));
  __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  __ j(not_equal, &start_polymorphic);

  // found, now call handler.
  Register handler = feedback;
  __ movp(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1)));
  __ leap(handler, FieldOperand(handler, Code::kHeaderSize));
  __ jmp(handler);

  // Polymorphic, we have to loop from 2 to N
  __ bind(&start_polymorphic);
  __ SmiToInteger32(length, FieldOperand(feedback, FixedArray::kLengthOffset));
  if (!is_polymorphic) {
    // If the IC could be monomorphic we have to make sure we don't go past the
    // end of the feedback array.
    __ cmpl(length, Immediate(2));
    __ j(equal, miss);
  }
  __ movl(counter, Immediate(2));

  // Loop over the remaining (map, handler) pairs, two slots at a time.
  __ bind(&next_loop);
  __ movp(cached_map, FieldOperand(feedback, counter, times_pointer_size,
                                   FixedArray::kHeaderSize));
  __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  __ j(not_equal, &prepare_next);
  __ movp(handler, FieldOperand(feedback, counter, times_pointer_size,
                                FixedArray::kHeaderSize + kPointerSize));
  __ leap(handler, FieldOperand(handler, Code::kHeaderSize));
  __ jmp(handler);

  __ bind(&prepare_next);
  __ addl(counter, Immediate(2));
  __ cmpl(counter, length);
  __ j(less, &next_loop);

  // We exhausted our array of map handler pairs.
  __ jmp(miss);
}
3444
3445
// Fast path for a (presumed) monomorphic IC: if |feedback| is a weak cell
// holding the receiver's map, jumps straight to the handler stored in the
// next vector slot. Smi receivers branch to |load_smi_map| first; a
// mismatch branches to |try_array| (with |compare_map| as the re-entry
// point after the smi-map load).
static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver,
                                  Register receiver_map, Register feedback,
                                  Register vector, Register integer_slot,
                                  Label* compare_map, Label* load_smi_map,
                                  Label* try_array) {
  __ JumpIfSmi(receiver, load_smi_map);
  __ movp(receiver_map, FieldOperand(receiver, 0));

  __ bind(compare_map);
  // Safe even if feedback is not a weak cell: only kValueOffset is read.
  __ cmpp(receiver_map, FieldOperand(feedback, WeakCell::kValueOffset));
  __ j(not_equal, try_array);
  // The handler lives in the slot after the weak cell.
  Register handler = feedback;
  __ movp(handler, FieldOperand(vector, integer_slot, times_pointer_size,
                                FixedArray::kHeaderSize + kPointerSize));
  __ leap(handler, FieldOperand(handler, Code::kHeaderSize));
  __ jmp(handler);
}
3463
3464
3465void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }
3466
3467
3468void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
3469 GenerateImpl(masm, true);
3470}
3471
3472
// Vector-based LoadIC dispatcher: tries monomorphic feedback, then a
// polymorphic map/handler array, then the megamorphic stub cache, and
// finally the generic miss handler.
void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // rdx
  Register name = LoadWithVectorDescriptor::NameRegister();          // rcx
  Register vector = LoadWithVectorDescriptor::VectorRegister();      // rbx
  Register slot = LoadWithVectorDescriptor::SlotRegister();          // rax
  Register feedback = rdi;
  Register integer_slot = r8;
  Register receiver_map = r9;

  // Load the feedback entry for this slot from the vector.
  __ SmiToInteger32(integer_slot, slot);
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector,
                        integer_slot, &compare_map, &load_smi_map, &try_array);

  // Is it a fixed array?
  __ bind(&try_array);
  __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &not_array);
  HandleArrayCases(masm, feedback, receiver_map, integer_slot, r11, r15, true,
                   &miss);

  // Not an array: megamorphic sentinel means probe the stub cache.
  __ bind(&not_array);
  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
  __ j(not_equal, &miss);
  Code::Flags code_flags =
      Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::LOAD_IC));
  masm->isolate()->stub_cache()->GenerateProbe(
      masm, Code::LOAD_IC, code_flags, receiver, name, feedback, no_reg);

  __ bind(&miss);
  LoadIC::GenerateMiss(masm);

  // Smi receivers use the heap-number map for feedback comparison.
  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}
3516
3517
3518void KeyedLoadICStub::Generate(MacroAssembler* masm) {
3519 GenerateImpl(masm, false);
3520}
3521
3522
3523void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
3524 GenerateImpl(masm, true);
3525}
3526
3527
// Vector-based KeyedLoadIC dispatcher: monomorphic feedback, polymorphic
// element-handler array, megamorphic stub, or a name-keyed map/handler
// array in the adjacent slot; otherwise falls through to the miss handler.
void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // rdx
  Register key = LoadWithVectorDescriptor::NameRegister();           // rcx
  Register vector = LoadWithVectorDescriptor::VectorRegister();      // rbx
  Register slot = LoadWithVectorDescriptor::SlotRegister();          // rax
  Register feedback = rdi;
  Register integer_slot = r8;
  Register receiver_map = r9;

  // Load the feedback entry for this slot from the vector.
  __ SmiToInteger32(integer_slot, slot);
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector,
                        integer_slot, &compare_map, &load_smi_map, &try_array);

  __ bind(&try_array);
  // Is it a fixed array?
  __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &not_array);

  // We have a polymorphic element handler.
  Label polymorphic, try_poly_name;
  __ bind(&polymorphic);
  HandleArrayCases(masm, feedback, receiver_map, integer_slot, r11, r15, true,
                   &miss);

  __ bind(&not_array);
  // Is it generic?
  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
  __ j(not_equal, &try_poly_name);
  Handle<Code> megamorphic_stub =
      KeyedLoadIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
  __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);

  __ bind(&try_poly_name);
  // We might have a name in feedback, and a fixed array in the next slot.
  __ cmpp(key, feedback);
  __ j(not_equal, &miss);
  // If the name comparison succeeded, we know we have a fixed array with
  // at least one map/handler pair.
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize + kPointerSize));
  HandleArrayCases(masm, feedback, receiver_map, integer_slot, r11, r15, false,
                   &miss);

  __ bind(&miss);
  KeyedLoadIC::GenerateMiss(masm);

  // Smi receivers use the heap-number map for feedback comparison.
  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}
3586
3587
3588void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) {
3589 __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
3590 VectorStoreICStub stub(isolate(), state());
3591 stub.GenerateForTrampoline(masm);
3592}
3593
3594
3595void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
3596 __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
3597 VectorKeyedStoreICStub stub(isolate(), state());
3598 stub.GenerateForTrampoline(masm);
3599}
3600
3601
3602void VectorStoreICStub::Generate(MacroAssembler* masm) {
3603 GenerateImpl(masm, false);
3604}
3605
3606
3607void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
3608 GenerateImpl(masm, true);
3609}
3610
3611
// Vector-based StoreIC dispatcher: tries monomorphic feedback, then a
// polymorphic map/handler array, then the megamorphic stub cache, and
// finally the generic miss handler.
void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = VectorStoreICDescriptor::ReceiverRegister();  // rdx
  Register key = VectorStoreICDescriptor::NameRegister();           // rcx
  Register vector = VectorStoreICDescriptor::VectorRegister();      // rbx
  Register slot = VectorStoreICDescriptor::SlotRegister();          // rdi
  DCHECK(VectorStoreICDescriptor::ValueRegister().is(rax));         // rax
  Register feedback = r8;
  Register integer_slot = r9;
  Register receiver_map = r11;
  DCHECK(!AreAliased(feedback, integer_slot, vector, slot, receiver_map));

  // Load the feedback entry for this slot from the vector.
  __ SmiToInteger32(integer_slot, slot);
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector,
                        integer_slot, &compare_map, &load_smi_map, &try_array);

  // Is it a fixed array?
  __ bind(&try_array);
  __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &not_array);
  HandleArrayCases(masm, feedback, receiver_map, integer_slot, r14, r15, true,
                   &miss);

  // Not an array: megamorphic sentinel means probe the stub cache.
  __ bind(&not_array);
  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
  __ j(not_equal, &miss);

  Code::Flags code_flags =
      Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::STORE_IC));
  masm->isolate()->stub_cache()->GenerateProbe(masm, Code::STORE_IC, code_flags,
                                               receiver, key, feedback, no_reg);

  __ bind(&miss);
  StoreIC::GenerateMiss(masm);

  // Smi receivers use the heap-number map for feedback comparison.
  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}
3658
3659
// Non-trampoline entry point: emit the shared KeyedStoreIC dispatch body.
void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) {
  GenerateImpl(masm, false);
}
3663
3664
// Trampoline entry point: emit the shared KeyedStoreIC dispatch body with
// the in_frame flag set.
void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}
3668
3669
// Walks a polymorphic KeyedStore feedback array whose entries are triples of
// (receiver-map weak cell, transition-map weak cell or undefined, handler
// code).  On a map match with no transition it tail-jumps to the handler; on
// a match with a transition it loads the transition map into the descriptor's
// MapRegister first.  Falls through to |miss| when no entry matches or a
// transition weak cell has been cleared.
static void HandlePolymorphicKeyedStoreCase(MacroAssembler* masm,
                                            Register receiver_map,
                                            Register feedback, Register scratch,
                                            Register scratch1,
                                            Register scratch2, Label* miss) {
  // feedback initially contains the feedback array
  Label next, next_loop, prepare_next;
  Label transition_call;

  Register cached_map = scratch;
  Register counter = scratch1;
  Register length = scratch2;

  // Polymorphic, we have to loop from 0 to N - 1
  __ movp(counter, Immediate(0));
  __ movp(length, FieldOperand(feedback, FixedArray::kLengthOffset));
  __ SmiToInteger32(length, length);

  __ bind(&next_loop);
  // Entry slot [counter + 0]: receiver-map weak cell.
  __ movp(cached_map, FieldOperand(feedback, counter, times_pointer_size,
                                   FixedArray::kHeaderSize));
  __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  __ j(not_equal, &prepare_next);
  // Entry slot [counter + 1]: transition-map weak cell, or undefined when the
  // entry stores without transitioning.
  __ movp(cached_map, FieldOperand(feedback, counter, times_pointer_size,
                                   FixedArray::kHeaderSize + kPointerSize));
  __ CompareRoot(cached_map, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, &transition_call);
  // Entry slot [counter + 2]: handler code object; jump past its header.
  __ movp(feedback, FieldOperand(feedback, counter, times_pointer_size,
                                 FixedArray::kHeaderSize + 2 * kPointerSize));
  __ leap(feedback, FieldOperand(feedback, Code::kHeaderSize));
  __ jmp(feedback);

  __ bind(&transition_call);
  // The transition handler expects the target map in the MapRegister.
  DCHECK(receiver_map.is(VectorStoreTransitionDescriptor::MapRegister()));
  __ movp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  // The weak cell may have been cleared.
  __ JumpIfSmi(receiver_map, miss);
  // Get the handler in value.
  __ movp(feedback, FieldOperand(feedback, counter, times_pointer_size,
                                 FixedArray::kHeaderSize + 2 * kPointerSize));
  __ leap(feedback, FieldOperand(feedback, Code::kHeaderSize));
  __ jmp(feedback);

  __ bind(&prepare_next);
  // Advance by one triple.
  __ addl(counter, Immediate(3));
  __ cmpl(counter, length);
  __ j(less, &next_loop);

  // We exhausted our array of map handler pairs.
  __ jmp(miss);
}
3721
3722
// Emits the dispatch logic of a vector-based KeyedStoreIC: monomorphic weak
// cell first, then the polymorphic triple-entry array, then the megamorphic
// stub, then a name-keyed feedback pair (name in this slot, handler array in
// the next slot), and finally the miss handler.
// NOTE(review): |in_frame| is not consulted anywhere in this body — confirm
// it is only meaningful on other ports.
void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = VectorStoreICDescriptor::ReceiverRegister();  // rdx
  Register key = VectorStoreICDescriptor::NameRegister();           // rcx
  Register vector = VectorStoreICDescriptor::VectorRegister();      // rbx
  Register slot = VectorStoreICDescriptor::SlotRegister();          // rdi
  DCHECK(VectorStoreICDescriptor::ValueRegister().is(rax));         // rax
  Register feedback = r8;
  Register integer_slot = r9;
  Register receiver_map = r11;
  DCHECK(!AreAliased(feedback, integer_slot, vector, slot, receiver_map));

  // Load this slot's feedback entry from the type feedback vector.
  __ SmiToInteger32(integer_slot, slot);
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector,
                        integer_slot, &compare_map, &load_smi_map, &try_array);

  // Is it a fixed array?  If so, treat it as the polymorphic
  // (map, transition, handler) triple list.
  __ bind(&try_array);
  __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &not_array);
  HandlePolymorphicKeyedStoreCase(masm, receiver_map, feedback, integer_slot,
                                  r15, r14, &miss);

  __ bind(&not_array);
  Label try_poly_name;
  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
  __ j(not_equal, &try_poly_name);

  // Megamorphic: defer to the generic megamorphic KeyedStore stub.
  Handle<Code> megamorphic_stub =
      KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
  __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);

  __ bind(&try_poly_name);
  // We might have a name in feedback, and a fixed array in the next slot.
  __ cmpp(key, feedback);
  __ j(not_equal, &miss);
  // If the name comparison succeeded, we know we have a fixed array with
  // at least one map/handler pair.
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize + kPointerSize));
  HandleArrayCases(masm, feedback, receiver_map, integer_slot, r14, r15, false,
                   &miss);

  __ bind(&miss);
  KeyedStoreIC::GenerateMiss(masm);

  // Smi receivers are keyed in the feedback by the HeapNumber map.
  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}
3780
3781
// Trampoline for CallIC: load the type feedback vector into rbx (where
// CallICStub expects it) and tail-jump to the real CallIC stub.
void CallICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(rbx);
  CallICStub stub(isolate(), state());
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
}
3787
3788
// Emits a call to ProfileEntryHookStub at the current code position iff a
// function-entry hook is installed on the isolate; otherwise emits nothing.
void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
  if (masm->isolate()->function_entry_hook() != NULL) {
    ProfileEntryHookStub stub(masm->isolate());
    masm->CallStub(&stub);
  }
}
3795
3796
// Body of the profiler entry hook stub: computes the address of the
// instrumented function (the stub's own return address minus the short-call
// length) and the caller's stack pointer, then calls the C entry hook with
// those two arguments, preserving all caller-saved registers.
void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  // This stub can be called from essentially anywhere, so it needs to save
  // all volatile and callee-save registers.
  const size_t kNumSavedRegisters = 2;
  __ pushq(arg_reg_1);
  __ pushq(arg_reg_2);

  // Calculate the original stack pointer and store it in the second arg.
  // Skip the two registers pushed above plus the stub call's return address.
  __ leap(arg_reg_2,
          Operand(rsp, kNumSavedRegisters * kRegisterSize + kPCOnStackSize));

  // Calculate the function address to the first arg.
  // The return address points just past the short call into this stub.
  __ movp(arg_reg_1, Operand(rsp, kNumSavedRegisters * kRegisterSize));
  __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));

  // Save the remainder of the volatile registers.
  masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);

  // Call the entry hook function.
  __ Move(rax, FUNCTION_ADDR(isolate()->function_entry_hook()),
          Assembler::RelocInfoNone());

  AllowExternalCallThatCantCauseGC scope(masm);

  const int kArgumentCount = 2;
  __ PrepareCallCFunction(kArgumentCount);
  __ CallCFunction(rax, kArgumentCount);

  // Restore volatile regs.
  masm->PopCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);
  __ popq(arg_reg_2);
  __ popq(arg_reg_1);

  __ Ret();
}
3832
3833
// Tail-calls the array constructor stub T matching the runtime elements kind
// held in rdx.  With DISABLE_ALLOCATION_SITES a single stub for the initial
// kind is used; with DONT_OVERRIDE a compare-and-branch chain over every fast
// elements kind is emitted.
template<class T>
static void CreateArrayDispatch(MacroAssembler* masm,
                                AllocationSiteOverrideMode mode) {
  if (mode == DISABLE_ALLOCATION_SITES) {
    T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      // rdx holds the elements kind to dispatch on.
      __ cmpl(rdx, Immediate(kind));
      __ j(not_equal, &next);
      T stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}
3859
3860
// Dispatches the single-argument Array(len) constructor.  If the length
// argument is non-zero the array must be created holey, so a packed kind is
// upgraded to its holey counterpart (recording the transition in the
// AllocationSite when one is present) before tail-calling the matching
// ArraySingleArgumentConstructorStub.
static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
                                           AllocationSiteOverrideMode mode) {
  // rbx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
  // rdx - kind (if mode != DISABLE_ALLOCATION_SITES)
  // rax - number of arguments
  // rdi - constructor?
  // rsp[0] - return address
  // rsp[8] - last argument

  Label normal_sequence;
  if (mode == DONT_OVERRIDE) {
    // The kind enumeration is laid out so that every holey kind is odd and
    // directly follows its packed counterpart.
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
    STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4);
    STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);

    // is the low bit set? If so, we are holey and that is good.
    __ testb(rdx, Immediate(1));
    __ j(not_zero, &normal_sequence);
  }

  // look at the first argument
  StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rcx, args.GetArgumentOperand(0));
  __ testp(rcx, rcx);
  // A zero length keeps the array packed, so no kind fixup is needed.
  __ j(zero, &normal_sequence);

  if (mode == DISABLE_ALLOCATION_SITES) {
    ElementsKind initial = GetInitialFastElementsKind();
    ElementsKind holey_initial = GetHoleyElementsKind(initial);

    ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
                                                  holey_initial,
                                                  DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub_holey);

    __ bind(&normal_sequence);
    ArraySingleArgumentConstructorStub stub(masm->isolate(),
                                            initial,
                                            DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    // We are going to create a holey array, but our kind is non-holey.
    // Fix kind and retry (only if we have an allocation site in the slot).
    // Packed -> holey is +1 thanks to the enumeration layout asserted above.
    __ incl(rdx);

    if (FLAG_debug_code) {
      Handle<Map> allocation_site_map =
          masm->isolate()->factory()->allocation_site_map();
      __ Cmp(FieldOperand(rbx, 0), allocation_site_map);
      __ Assert(equal, kExpectedAllocationSite);
    }

    // Save the resulting elements kind in type info. We can't just store rdx
    // in the AllocationSite::transition_info field because elements kind is
    // restricted to a portion of the field...upper bits need to be left alone.
    STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
    __ SmiAddConstant(FieldOperand(rbx, AllocationSite::kTransitionInfoOffset),
                      Smi::FromInt(kFastElementsKindPackedToHoley));

    __ bind(&normal_sequence);
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmpl(rdx, Immediate(kind));
      __ j(not_equal, &next);
      ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}
3942
3943
3944template<class T>
3945static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
3946 int to_index = GetSequenceIndexFromFastElementsKind(
3947 TERMINAL_FAST_ELEMENTS_KIND);
3948 for (int i = 0; i <= to_index; ++i) {
3949 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
3950 T stub(isolate, kind);
3951 stub.GetCode();
3952 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
3953 T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
3954 stub1.GetCode();
3955 }
3956 }
3957}
3958
Ben Murdoch61f157c2016-09-16 13:49:30 +01003959void CommonArrayConstructorStub::GenerateStubsAheadOfTime(Isolate* isolate) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003960 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
3961 isolate);
3962 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
3963 isolate);
Ben Murdoch61f157c2016-09-16 13:49:30 +01003964 ArrayNArgumentsConstructorStub stub(isolate);
3965 stub.GetCode();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003966
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003967 ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
3968 for (int i = 0; i < 2; i++) {
3969 // For internal arrays we only need a few things
3970 InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
3971 stubh1.GetCode();
3972 InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
3973 stubh2.GetCode();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003974 }
3975}
3976
3977
// Routes an Array construction to the stub specialized for its argument
// count.  For the ANY case this is decided at runtime by testing rax (argc);
// otherwise the count is known statically from the stub's minor key.
void ArrayConstructorStub::GenerateDispatchToArrayStub(
    MacroAssembler* masm,
    AllocationSiteOverrideMode mode) {
  if (argument_count() == ANY) {
    Label not_zero_case, not_one_case;
    __ testp(rax, rax);
    __ j(not_zero, &not_zero_case);
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);

    __ bind(&not_zero_case);
    __ cmpl(rax, Immediate(1));
    __ j(greater, &not_one_case);
    CreateArrayDispatchOneArgument(masm, mode);

    __ bind(&not_one_case);
    ArrayNArgumentsConstructorStub stub(masm->isolate());
    __ TailCallStub(&stub);
  } else if (argument_count() == NONE) {
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
  } else if (argument_count() == ONE) {
    CreateArrayDispatchOneArgument(masm, mode);
  } else if (argument_count() == MORE_THAN_ONE) {
    ArrayNArgumentsConstructorStub stub(masm->isolate());
    __ TailCallStub(&stub);
  } else {
    UNREACHABLE();
  }
}
4006
4007
// Entry point for the Array constructor.  Verifies debug invariants, enters
// the Array function's context, and either dispatches to a kind-specialized
// constructor stub (consulting the AllocationSite's recorded elements kind
// when present) or, for subclass construction (new.target != Array), falls
// back to %NewArray.
void ArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rbx    : AllocationSite or undefined
  //  -- rdi    : constructor
  //  -- rdx    : new target
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rcx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);

    // We should either have undefined in rbx or a valid AllocationSite
    __ AssertUndefinedOrAllocationSite(rbx);
  }

  // Enter the context of the Array function.
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Subclassing: the new target differs from the Array function itself.
  Label subclassing;
  __ cmpp(rdi, rdx);
  __ j(not_equal, &subclassing);

  Label no_info;
  // If the feedback vector is the undefined value call an array constructor
  // that doesn't use AllocationSites.
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(equal, &no_info);

  // Only look at the lower 16 bits of the transition info.
  __ movp(rdx, FieldOperand(rbx, AllocationSite::kTransitionInfoOffset));
  __ SmiToInteger32(rdx, rdx);
  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
  __ andp(rdx, Immediate(AllocationSite::ElementsKindBits::kMask));
  GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);

  __ bind(&no_info);
  GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);

  // Subclassing
  __ bind(&subclassing);
  // Overwrite the receiver slot with the constructor and account for the
  // three extra runtime arguments (constructor, new target, site) in argc.
  switch (argument_count()) {
    case ANY:
    case MORE_THAN_ONE: {
      StackArgumentsAccessor args(rsp, rax);
      __ movp(args.GetReceiverOperand(), rdi);
      __ addp(rax, Immediate(3));
      break;
    }
    case NONE: {
      StackArgumentsAccessor args(rsp, 0);
      __ movp(args.GetReceiverOperand(), rdi);
      __ Set(rax, 3);
      break;
    }
    case ONE: {
      StackArgumentsAccessor args(rsp, 1);
      __ movp(args.GetReceiverOperand(), rdi);
      __ Set(rax, 4);
      break;
    }
  }
  __ PopReturnAddressTo(rcx);
  __ Push(rdx);
  __ Push(rbx);
  __ PushReturnAddressFrom(rcx);
  __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
}
4086
4087
// Dispatches an InternalArray construction for a fixed elements |kind| based
// on the runtime argument count in rax: zero args, one arg (upgrading packed
// to holey when the requested length is non-zero), or N args.
void InternalArrayConstructorStub::GenerateCase(
    MacroAssembler* masm, ElementsKind kind) {
  Label not_zero_case, not_one_case;
  Label normal_sequence;

  __ testp(rax, rax);
  __ j(not_zero, &not_zero_case);
  InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
  __ TailCallStub(&stub0);

  __ bind(&not_zero_case);
  __ cmpl(rax, Immediate(1));
  __ j(greater, &not_one_case);

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array
    // look at the first argument
    StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movp(rcx, args.GetArgumentOperand(0));
    __ testp(rcx, rcx);
    __ j(zero, &normal_sequence);

    InternalArraySingleArgumentConstructorStub
        stub1_holey(isolate(), GetHoleyElementsKind(kind));
    __ TailCallStub(&stub1_holey);
  }

  __ bind(&normal_sequence);
  InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
  __ TailCallStub(&stub1);

  __ bind(&not_one_case);
  ArrayNArgumentsConstructorStub stubN(isolate());
  __ TailCallStub(&stubN);
}
4123
4124
// Entry point for the InternalArray constructor.  Reads the elements kind
// out of the constructor's initial map (bit field 2) and dispatches to the
// FAST_ELEMENTS or FAST_HOLEY_ELEMENTS case.
void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rdi    : constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rcx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind
  __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));

  // Load the map's "bit field 2" into |result|. We only need the first byte,
  // but the following masking takes care of that anyway.
  __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ DecodeField<Map::ElementsKindBits>(rcx);

  if (FLAG_debug_code) {
    Label done;
    __ cmpl(rcx, Immediate(FAST_ELEMENTS));
    __ j(equal, &done);
    __ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS));
    __ Assert(equal,
              kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  Label fast_elements_case;
  __ cmpl(rcx, Immediate(FAST_ELEMENTS));
  __ j(equal, &fast_elements_case);
  GenerateCase(masm, FAST_HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateCase(masm, FAST_ELEMENTS);
}
4174
4175
// Fast path for JSObject allocation in `new target(...)`.  Validates that
// new.target is a JSFunction whose initial map was created for |target|,
// allocates the object inline (handling in-object slack tracking), and falls
// back to %AllocateInNewSpace / %NewObject when the fast path cannot apply.
void FastNewObjectStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdi    : target
  //  -- rdx    : new target
  //  -- rsi    : context
  //  -- rsp[0] : return address
  // -----------------------------------
  __ AssertFunction(rdi);
  __ AssertReceiver(rdx);

  // Verify that the new target is a JSFunction.
  Label new_object;
  __ CmpObjectType(rdx, JS_FUNCTION_TYPE, rbx);
  __ j(not_equal, &new_object);

  // Load the initial map and verify that it's in fact a map.
  __ movp(rcx, FieldOperand(rdx, JSFunction::kPrototypeOrInitialMapOffset));
  __ JumpIfSmi(rcx, &new_object);
  __ CmpObjectType(rcx, MAP_TYPE, rbx);
  __ j(not_equal, &new_object);

  // Fall back to runtime if the target differs from the new target's
  // initial map constructor.
  __ cmpp(rdi, FieldOperand(rcx, Map::kConstructorOrBackPointerOffset));
  __ j(not_equal, &new_object);

  // Allocate the JSObject on the heap.
  Label allocate, done_allocate;
  // Instance size is stored in words; scale to bytes with lea.
  __ movzxbl(rbx, FieldOperand(rcx, Map::kInstanceSizeOffset));
  __ leal(rbx, Operand(rbx, times_pointer_size, 0));
  __ Allocate(rbx, rax, rdi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
  __ bind(&done_allocate);

  // Initialize the JSObject fields.
  __ movp(FieldOperand(rax, JSObject::kMapOffset), rcx);
  __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rbx);
  STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
  __ leap(rbx, FieldOperand(rax, JSObject::kHeaderSize));

  // ----------- S t a t e -------------
  //  -- rax    : result (tagged)
  //  -- rbx    : result fields (untagged)
  //  -- rdi    : result end (untagged)
  //  -- rcx    : initial map
  //  -- rsi    : context
  //  -- rsp[0] : return address
  // -----------------------------------

  // Perform in-object slack tracking if requested.
  Label slack_tracking;
  STATIC_ASSERT(Map::kNoSlackTracking == 0);
  __ LoadRoot(r11, Heap::kUndefinedValueRootIndex);
  __ testl(FieldOperand(rcx, Map::kBitField3Offset),
           Immediate(Map::ConstructionCounter::kMask));
  __ j(not_zero, &slack_tracking, Label::kNear);
  {
    // Initialize all in-object fields with undefined.
    __ InitializeFieldsWithFiller(rbx, rdi, r11);
    __ Ret();
  }
  __ bind(&slack_tracking);
  {
    // Decrease generous allocation count.
    STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
    __ subl(FieldOperand(rcx, Map::kBitField3Offset),
            Immediate(1 << Map::ConstructionCounter::kShift));

    // Initialize the in-object fields with undefined.
    // rdx ends up pointing |unused| words before the object end.
    __ movzxbl(rdx, FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset));
    __ negp(rdx);
    __ leap(rdx, Operand(rdi, rdx, times_pointer_size, 0));
    __ InitializeFieldsWithFiller(rbx, rdx, r11);

    // Initialize the remaining (reserved) fields with one pointer filler map.
    __ LoadRoot(r11, Heap::kOnePointerFillerMapRootIndex);
    __ InitializeFieldsWithFiller(rdx, rdi, r11);

    // Check if we can finalize the instance size.
    Label finalize;
    STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
    __ testl(FieldOperand(rcx, Map::kBitField3Offset),
             Immediate(Map::ConstructionCounter::kMask));
    __ j(zero, &finalize, Label::kNear);
    __ Ret();

    // Finalize the instance size.
    __ bind(&finalize);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(rax);
      __ Push(rcx);
      __ CallRuntime(Runtime::kFinalizeInstanceSize);
      __ Pop(rax);
    }
    __ Ret();
  }

  // Fall back to %AllocateInNewSpace.
  __ bind(&allocate);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Integer32ToSmi(rbx, rbx);
    __ Push(rcx);
    __ Push(rbx);
    __ CallRuntime(Runtime::kAllocateInNewSpace);
    __ Pop(rcx);
  }
  // Recompute the untagged end-of-object pointer expected at done_allocate.
  __ movzxbl(rbx, FieldOperand(rcx, Map::kInstanceSizeOffset));
  __ leap(rdi, Operand(rax, rbx, times_pointer_size, 0));
  STATIC_ASSERT(kHeapObjectTag == 1);
  __ decp(rdi);  // Remove the tag from the end address.
  __ jmp(&done_allocate);

  // Fall back to %NewObject.
  __ bind(&new_object);
  __ PopReturnAddressTo(rcx);
  __ Push(rdi);
  __ Push(rdx);
  __ PushReturnAddressFrom(rcx);
  __ TailCallRuntime(Runtime::kNewObject);
}
4299
4300
// Fast path for materializing a rest parameter array.  If no arguments
// adaptor frame exists (or no extra arguments were passed) it returns an
// empty JSArray; otherwise it copies the extra arguments into a freshly
// allocated FixedArray + JSArray.  Falls back to %AllocateInNewSpace for
// allocation failures and to %NewRestParameter for oversized arrays.
void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdi    : function
  //  -- rsi    : context
  //  -- rbp    : frame pointer
  //  -- rsp[0] : return address
  // -----------------------------------
  __ AssertFunction(rdi);

  // Make rdx point to the JavaScript frame.
  __ movp(rdx, rbp);
  if (skip_stub_frame()) {
    // For Ignition we need to skip the handler/stub frame to reach the
    // JavaScript frame for the function.
    __ movp(rdx, Operand(rdx, StandardFrameConstants::kCallerFPOffset));
  }
  if (FLAG_debug_code) {
    Label ok;
    __ cmpp(rdi, Operand(rdx, StandardFrameConstants::kFunctionOffset));
    __ j(equal, &ok);
    __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
    __ bind(&ok);
  }

  // Check if we have rest parameters (only possible if we have an
  // arguments adaptor frame below the function frame).
  Label no_rest_parameters;
  __ movp(rbx, Operand(rdx, StandardFrameConstants::kCallerFPOffset));
  __ Cmp(Operand(rbx, CommonFrameConstants::kContextOrFrameTypeOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(not_equal, &no_rest_parameters, Label::kNear);

  // Check if the arguments adaptor frame contains more arguments than
  // specified by the function's internal formal parameter count.
  // After the subtraction rax holds the number of rest parameters.
  Label rest_parameters;
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ LoadSharedFunctionInfoSpecialField(
      rcx, rcx, SharedFunctionInfo::kFormalParameterCountOffset);
  __ SmiToInteger32(
      rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ subl(rax, rcx);
  __ j(greater, &rest_parameters);

  // Return an empty rest parameter array.
  __ bind(&no_rest_parameters);
  {
    // ----------- S t a t e -------------
    //  -- rsi    : context
    //  -- rsp[0] : return address
    // -----------------------------------

    // Allocate an empty rest parameter array.
    Label allocate, done_allocate;
    __ Allocate(JSArray::kSize, rax, rdx, rcx, &allocate, NO_ALLOCATION_FLAGS);
    __ bind(&done_allocate);

    // Setup the rest parameter array in rax.
    __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, rcx);
    __ movp(FieldOperand(rax, JSArray::kMapOffset), rcx);
    __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
    __ movp(FieldOperand(rax, JSArray::kPropertiesOffset), rcx);
    __ movp(FieldOperand(rax, JSArray::kElementsOffset), rcx);
    __ movp(FieldOperand(rax, JSArray::kLengthOffset), Immediate(0));
    STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
    __ Ret();

    // Fall back to %AllocateInNewSpace.
    __ bind(&allocate);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(Smi::FromInt(JSArray::kSize));
      __ CallRuntime(Runtime::kAllocateInNewSpace);
    }
    __ jmp(&done_allocate);
  }

  __ bind(&rest_parameters);
  {
    // Compute the pointer to the first rest parameter (skipping the receiver).
    __ leap(rbx, Operand(rbx, rax, times_pointer_size,
                         StandardFrameConstants::kCallerSPOffset -
                             1 * kPointerSize));

    // ----------- S t a t e -------------
    //  -- rdi    : function
    //  -- rsi    : context
    //  -- rax    : number of rest parameters
    //  -- rbx    : pointer to first rest parameters
    //  -- rsp[0] : return address
    // -----------------------------------

    // Allocate space for the rest parameter array plus the backing store.
    Label allocate, done_allocate;
    __ leal(rcx, Operand(rax, times_pointer_size,
                         JSArray::kSize + FixedArray::kHeaderSize));
    __ Allocate(rcx, rdx, r8, no_reg, &allocate, NO_ALLOCATION_FLAGS);
    __ bind(&done_allocate);

    // Compute the arguments.length in rdi.
    __ Integer32ToSmi(rdi, rax);

    // Setup the elements array in rdx.
    __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
    __ movp(FieldOperand(rdx, FixedArray::kMapOffset), rcx);
    __ movp(FieldOperand(rdx, FixedArray::kLengthOffset), rdi);
    {
      // Copy the rest parameters into the elements array.  The stack grows
      // down, so rbx is decremented while the destination index advances.
      Label loop, done_loop;
      __ Set(rcx, 0);
      __ bind(&loop);
      __ cmpl(rcx, rax);
      __ j(equal, &done_loop, Label::kNear);
      __ movp(kScratchRegister, Operand(rbx, 0 * kPointerSize));
      __ movp(
          FieldOperand(rdx, rcx, times_pointer_size, FixedArray::kHeaderSize),
          kScratchRegister);
      __ subp(rbx, Immediate(1 * kPointerSize));
      __ addl(rcx, Immediate(1));
      __ jmp(&loop);
      __ bind(&done_loop);
    }

    // Setup the rest parameter array in rax.
    // The JSArray lives immediately after the FixedArray's elements.
    __ leap(rax,
            Operand(rdx, rax, times_pointer_size, FixedArray::kHeaderSize));
    __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, rcx);
    __ movp(FieldOperand(rax, JSArray::kMapOffset), rcx);
    __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
    __ movp(FieldOperand(rax, JSArray::kPropertiesOffset), rcx);
    __ movp(FieldOperand(rax, JSArray::kElementsOffset), rdx);
    __ movp(FieldOperand(rax, JSArray::kLengthOffset), rdi);
    STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
    __ Ret();

    // Fall back to %AllocateInNewSpace (if not too big).
    Label too_big_for_new_space;
    __ bind(&allocate);
    __ cmpl(rcx, Immediate(Page::kMaxRegularHeapObjectSize));
    __ j(greater, &too_big_for_new_space);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Integer32ToSmi(rax, rax);
      __ Integer32ToSmi(rcx, rcx);
      __ Push(rax);
      __ Push(rbx);
      __ Push(rcx);
      __ CallRuntime(Runtime::kAllocateInNewSpace);
      __ movp(rdx, rax);
      __ Pop(rbx);
      __ Pop(rax);
      __ SmiToInteger32(rax, rax);
    }
    __ jmp(&done_allocate);

    // Fall back to %NewRestParameter.
    __ bind(&too_big_for_new_space);
    __ PopReturnAddressTo(kScratchRegister);
    __ Push(rdi);
    __ PushReturnAddressFrom(kScratchRegister);
    __ TailCallRuntime(Runtime::kNewRestParameter);
  }
}
4462
4463
// Fast path for materializing a sloppy-mode arguments object for rdi:
// reads the formal parameter count from the SharedFunctionInfo, locates the
// caller-provided arguments (handling an intervening arguments adaptor
// frame), allocates the JSSloppyArgumentsObject together with its parameter
// map and backing store in a single allocation, and tail-calls
// Runtime::kNewSloppyArguments when the allocation fails.
void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdi    : function
  //  -- rsi    : context
  //  -- rbp    : frame pointer
  //  -- rsp[0] : return address
  // -----------------------------------
  __ AssertFunction(rdi);

  // Make r9 point to the JavaScript frame.
  __ movp(r9, rbp);
  if (skip_stub_frame()) {
    // For Ignition we need to skip the handler/stub frame to reach the
    // JavaScript frame for the function.
    __ movp(r9, Operand(r9, StandardFrameConstants::kCallerFPOffset));
  }
  if (FLAG_debug_code) {
    // Sanity check: the frame we landed on must belong to rdi.
    Label ok;
    __ cmpp(rdi, Operand(r9, StandardFrameConstants::kFunctionOffset));
    __ j(equal, &ok);
    __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
    __ bind(&ok);
  }

  // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
  // Load the formal parameter count and derive the parameters pointer
  // (address just past the last declared parameter on the caller's stack).
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ LoadSharedFunctionInfoSpecialField(
      rcx, rcx, SharedFunctionInfo::kFormalParameterCountOffset);
  __ leap(rdx, Operand(r9, rcx, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));
  __ Integer32ToSmi(rcx, rcx);

  // rcx : number of parameters (tagged)
  // rdx : parameters pointer
  // rdi : function
  // rsp[0] : return address
  // r9  : JavaScript frame pointer.
  // Registers used over the whole function:
  //  rbx: the mapped parameter count (untagged)
  //  rax: the allocated object (tagged).
  Factory* factory = isolate()->factory();

  __ SmiToInteger64(rbx, rcx);
  // rbx = parameter count (untagged)

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ movp(rax, Operand(r9, StandardFrameConstants::kCallerFPOffset));
  __ movp(r8, Operand(rax, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ Cmp(r8, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor_frame);

  // No adaptor, parameter count = argument count.
  __ movp(r11, rbx);
  __ jmp(&try_allocate, Label::kNear);

  // We have an adaptor frame. Patch the parameters pointer to point into the
  // adaptor frame, which holds the actual (possibly larger) argument list.
  __ bind(&adaptor_frame);
  __ SmiToInteger64(
      r11, Operand(rax, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ leap(rdx, Operand(rax, r11, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));

  // rbx = parameter count (untagged)
  // r11 = argument count (untagged)
  // Compute the mapped parameter count = min(rbx, r11) in rbx.
  __ cmpp(rbx, r11);
  __ j(less_equal, &try_allocate, Label::kNear);
  __ movp(rbx, r11);

  __ bind(&try_allocate);

  // Compute the sizes of backing store, parameter map, and arguments object.
  // 1. Parameter map, has 2 extra words containing context and backing store.
  const int kParameterMapHeaderSize =
      FixedArray::kHeaderSize + 2 * kPointerSize;
  Label no_parameter_map;
  __ xorp(r8, r8);
  __ testp(rbx, rbx);
  __ j(zero, &no_parameter_map, Label::kNear);
  __ leap(r8, Operand(rbx, times_pointer_size, kParameterMapHeaderSize));
  __ bind(&no_parameter_map);

  // 2. Backing store.
  __ leap(r8, Operand(r8, r11, times_pointer_size, FixedArray::kHeaderSize));

  // 3. Arguments object.
  __ addp(r8, Immediate(JSSloppyArgumentsObject::kSize));

  // Do the allocation of all three objects in one go.
  __ Allocate(r8, rax, r9, no_reg, &runtime, NO_ALLOCATION_FLAGS);

  // rax = address of new object(s) (tagged)
  // r11 = argument count (untagged)
  // Get the arguments map from the current native context into r9: the
  // plain sloppy map if nothing is mapped, the fast-aliased map otherwise.
  Label has_mapped_parameters, instantiate;
  __ movp(r9, NativeContextOperand());
  __ testp(rbx, rbx);
  __ j(not_zero, &has_mapped_parameters, Label::kNear);

  const int kIndex = Context::SLOPPY_ARGUMENTS_MAP_INDEX;
  __ movp(r9, Operand(r9, Context::SlotOffset(kIndex)));
  __ jmp(&instantiate, Label::kNear);

  const int kAliasedIndex = Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX;
  __ bind(&has_mapped_parameters);
  __ movp(r9, Operand(r9, Context::SlotOffset(kAliasedIndex)));
  __ bind(&instantiate);

  // rax = address of new object (tagged)
  // rbx = mapped parameter count (untagged)
  // r11 = argument count (untagged)
  // r9 = address of arguments map (tagged)
  __ movp(FieldOperand(rax, JSObject::kMapOffset), r9);
  __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister);
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), kScratchRegister);

  // Set up the callee in-object property.
  __ AssertNotSmi(rdi);
  __ movp(FieldOperand(rax, JSSloppyArgumentsObject::kCalleeOffset), rdi);

  // Use the length (smi tagged) and set that as an in-object property too.
  // Note: r11 is tagged from here on.
  __ Integer32ToSmi(r11, r11);
  __ movp(FieldOperand(rax, JSSloppyArgumentsObject::kLengthOffset), r11);

  // Set up the elements pointer in the allocated arguments object.
  // If we allocated a parameter map, rdi will point there, otherwise to the
  // backing store.
  __ leap(rdi, Operand(rax, JSSloppyArgumentsObject::kSize));
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rdi);

  // rax = address of new object (tagged)
  // rbx = mapped parameter count (untagged)
  // r11 = argument count (tagged)
  // rdi = address of parameter map or backing store (tagged)

  // Initialize parameter map. If there are no mapped arguments, we're done.
  Label skip_parameter_map;
  __ testp(rbx, rbx);
  __ j(zero, &skip_parameter_map);

  __ LoadRoot(kScratchRegister, Heap::kSloppyArgumentsElementsMapRootIndex);
  // rbx contains the untagged argument count. Add 2 and tag to write.
  __ movp(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
  __ Integer64PlusConstantToSmi(r9, rbx, 2);
  __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), r9);
  // Slot 0 of the parameter map holds the context, slot 1 the backing store.
  __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 0 * kPointerSize), rsi);
  __ leap(r9, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
  __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 1 * kPointerSize), r9);

  // Copy the parameter slots and the holes in the arguments.
  // We need to fill in mapped_parameter_count slots. They index the context,
  // where parameters are stored in reverse order, at
  //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
  // The mapped parameter thus need to get indices
  //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
  //       MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
  // We loop from right to left.
  Label parameters_loop, parameters_test;

  // Load tagged parameter count into r9.
  __ Integer32ToSmi(r9, rbx);
  __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS));
  __ addp(r8, rcx);
  __ subp(r8, r9);
  __ movp(rcx, rdi);
  __ leap(rdi, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
  __ SmiToInteger64(r9, r9);
  // r9 = loop variable (untagged)
  // r8 = mapping index (tagged)
  // rcx = address of parameter map (tagged)
  // rdi = address of backing store (tagged)
  __ jmp(&parameters_test, Label::kNear);

  // Each iteration writes the context-slot index into the parameter map and
  // a hole into the corresponding backing-store slot (the hole marks the
  // element as aliased to the context).
  __ bind(&parameters_loop);
  __ subp(r9, Immediate(1));
  __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
  __ movp(FieldOperand(rcx, r9, times_pointer_size, kParameterMapHeaderSize),
          r8);
  __ movp(FieldOperand(rdi, r9, times_pointer_size, FixedArray::kHeaderSize),
          kScratchRegister);
  __ SmiAddConstant(r8, r8, Smi::FromInt(1));
  __ bind(&parameters_test);
  __ testp(r9, r9);
  __ j(not_zero, &parameters_loop, Label::kNear);

  __ bind(&skip_parameter_map);

  // r11 = argument count (tagged)
  // rdi = address of backing store (tagged)
  // Copy arguments header and remaining slots (if there are any).
  __ Move(FieldOperand(rdi, FixedArray::kMapOffset),
          factory->fixed_array_map());
  __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), r11);

  // Copy the unmapped arguments (indices [rbx, r11)) from the caller's stack
  // into the backing store. rdx walks the stack downwards.
  Label arguments_loop, arguments_test;
  __ movp(r8, rbx);
  // Untag r11 for the loop below.
  __ SmiToInteger64(r11, r11);
  __ leap(kScratchRegister, Operand(r8, times_pointer_size, 0));
  __ subp(rdx, kScratchRegister);
  __ jmp(&arguments_test, Label::kNear);

  __ bind(&arguments_loop);
  __ subp(rdx, Immediate(kPointerSize));
  __ movp(r9, Operand(rdx, 0));
  __ movp(FieldOperand(rdi, r8,
                       times_pointer_size,
                       FixedArray::kHeaderSize),
          r9);
  __ addp(r8, Immediate(1));

  __ bind(&arguments_test);
  __ cmpp(r8, r11);
  __ j(less, &arguments_loop, Label::kNear);

  // Return.
  __ ret(0);

  // Do the runtime call to allocate the arguments object.
  // r11 = argument count (untagged)
  __ bind(&runtime);
  __ Integer32ToSmi(r11, r11);
  __ PopReturnAddressTo(rax);
  __ Push(rdi);  // Push function.
  __ Push(rdx);  // Push parameters pointer.
  __ Push(r11);  // Push parameter count.
  __ PushReturnAddressFrom(rax);
  __ TailCallRuntime(Runtime::kNewSloppyArguments);
}
4696
4697
// Fast path for materializing a strict-mode arguments object for rdi:
// counts the caller-provided arguments (handling an arguments adaptor
// frame), allocates the JSStrictArgumentsObject plus its FixedArray backing
// store in one allocation, copies the arguments off the stack, and falls
// back to %AllocateInNewSpace or %NewStrictArguments when allocation fails.
void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdi    : function
  //  -- rsi    : context
  //  -- rbp    : frame pointer
  //  -- rsp[0] : return address
  // -----------------------------------
  __ AssertFunction(rdi);

  // Make rdx point to the JavaScript frame.
  __ movp(rdx, rbp);
  if (skip_stub_frame()) {
    // For Ignition we need to skip the handler/stub frame to reach the
    // JavaScript frame for the function.
    __ movp(rdx, Operand(rdx, StandardFrameConstants::kCallerFPOffset));
  }
  if (FLAG_debug_code) {
    // Sanity check: the frame we landed on must belong to rdi.
    Label ok;
    __ cmpp(rdi, Operand(rdx, StandardFrameConstants::kFunctionOffset));
    __ j(equal, &ok);
    __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
    __ bind(&ok);
  }

  // Check if we have an arguments adaptor frame below the function frame.
  Label arguments_adaptor, arguments_done;
  __ movp(rbx, Operand(rdx, StandardFrameConstants::kCallerFPOffset));
  __ Cmp(Operand(rbx, CommonFrameConstants::kContextOrFrameTypeOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &arguments_adaptor, Label::kNear);
  {
    // No adaptor frame: argument count is the formal parameter count.
    __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ LoadSharedFunctionInfoSpecialField(
        rax, rax, SharedFunctionInfo::kFormalParameterCountOffset);
    __ leap(rbx, Operand(rdx, rax, times_pointer_size,
                         StandardFrameConstants::kCallerSPOffset -
                             1 * kPointerSize));
  }
  __ jmp(&arguments_done, Label::kNear);
  __ bind(&arguments_adaptor);
  {
    // Adaptor frame present: take the actual argument count from it.
    __ SmiToInteger32(
        rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
    __ leap(rbx, Operand(rbx, rax, times_pointer_size,
                         StandardFrameConstants::kCallerSPOffset -
                             1 * kPointerSize));
  }
  __ bind(&arguments_done);

  // ----------- S t a t e -------------
  //  -- rax    : number of arguments
  //  -- rbx    : pointer to the first argument
  //  -- rdi    : function
  //  -- rsi    : context
  //  -- rsp[0] : return address
  // -----------------------------------

  // Allocate space for the strict arguments object plus the backing store.
  Label allocate, done_allocate;
  __ leal(rcx, Operand(rax, times_pointer_size, JSStrictArgumentsObject::kSize +
                                                    FixedArray::kHeaderSize));
  __ Allocate(rcx, rdx, r8, no_reg, &allocate, NO_ALLOCATION_FLAGS);
  __ bind(&done_allocate);

  // Compute the arguments.length in rdi.
  __ Integer32ToSmi(rdi, rax);

  // Setup the elements array in rdx.
  __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
  __ movp(FieldOperand(rdx, FixedArray::kMapOffset), rcx);
  __ movp(FieldOperand(rdx, FixedArray::kLengthOffset), rdi);
  {
    // Copy the arguments off the stack (rbx walks downwards) into the
    // backing store (rcx is the element index).
    Label loop, done_loop;
    __ Set(rcx, 0);
    __ bind(&loop);
    __ cmpl(rcx, rax);
    __ j(equal, &done_loop, Label::kNear);
    __ movp(kScratchRegister, Operand(rbx, 0 * kPointerSize));
    __ movp(
        FieldOperand(rdx, rcx, times_pointer_size, FixedArray::kHeaderSize),
        kScratchRegister);
    __ subp(rbx, Immediate(1 * kPointerSize));
    __ addl(rcx, Immediate(1));
    __ jmp(&loop);
    __ bind(&done_loop);
  }

  // Setup the strict arguments object in rax (it follows the backing store
  // within the single allocation).
  __ leap(rax,
          Operand(rdx, rax, times_pointer_size, FixedArray::kHeaderSize));
  __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, rcx);
  __ movp(FieldOperand(rax, JSStrictArgumentsObject::kMapOffset), rcx);
  __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSStrictArgumentsObject::kPropertiesOffset), rcx);
  __ movp(FieldOperand(rax, JSStrictArgumentsObject::kElementsOffset), rdx);
  __ movp(FieldOperand(rax, JSStrictArgumentsObject::kLengthOffset), rdi);
  STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
  __ Ret();

  // Fall back to %AllocateInNewSpace (if not too big).
  Label too_big_for_new_space;
  __ bind(&allocate);
  __ cmpl(rcx, Immediate(Page::kMaxRegularHeapObjectSize));
  __ j(greater, &too_big_for_new_space);
  {
    // Preserve rax (argument count, smi-tagged while on the stack) and rbx
    // (first-argument pointer) across the runtime call.
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Integer32ToSmi(rax, rax);
    __ Integer32ToSmi(rcx, rcx);
    __ Push(rax);
    __ Push(rbx);
    __ Push(rcx);
    __ CallRuntime(Runtime::kAllocateInNewSpace);
    __ movp(rdx, rax);
    __ Pop(rbx);
    __ Pop(rax);
    __ SmiToInteger32(rax, rax);
  }
  __ jmp(&done_allocate);

  // Fall back to %NewStrictArguments.
  __ bind(&too_big_for_new_space);
  __ PopReturnAddressTo(kScratchRegister);
  __ Push(rdi);
  __ PushReturnAddressFrom(kScratchRegister);
  __ TailCallRuntime(Runtime::kNewStrictArguments);
}
4824
4825
// Stores rax (value) into the global PropertyCell found at script-context
// slot rbx, walking up depth() contexts first. Fast paths cover mutable
// cells, same-value stores, and constant-type cells with matching maps;
// everything else tail-calls the Runtime::kStoreGlobalViaContext_* function
// selected by the stub's language mode.
void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
  Register context_reg = rsi;
  Register slot_reg = rbx;
  Register value_reg = rax;
  Register cell_reg = r8;
  Register cell_details_reg = rdx;
  Register cell_value_reg = r9;
  Label fast_heapobject_case, fast_smi_case, slow_case;

  if (FLAG_debug_code) {
    // The hole is never stored through this stub (checked again below).
    __ CompareRoot(value_reg, Heap::kTheHoleValueRootIndex);
    __ Check(not_equal, kUnexpectedValue);
  }

  // Go up context chain to the script context.
  for (int i = 0; i < depth(); ++i) {
    __ movp(rdi, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
    context_reg = rdi;
  }

  // Load the PropertyCell at the specified slot.
  __ movp(cell_reg, ContextOperand(context_reg, slot_reg));

  // Load PropertyDetails for the cell (actually only the cell_type, kind and
  // READ_ONLY bit of attributes).
  __ SmiToInteger32(cell_details_reg,
                    FieldOperand(cell_reg, PropertyCell::kDetailsOffset));
  __ andl(cell_details_reg,
          Immediate(PropertyDetails::PropertyCellTypeField::kMask |
                    PropertyDetails::KindField::kMask |
                    PropertyDetails::kAttributesReadOnlyMask));

  // Check if PropertyCell holds mutable data.
  Label not_mutable_data;
  __ cmpl(cell_details_reg,
          Immediate(PropertyDetails::PropertyCellTypeField::encode(
                        PropertyCellType::kMutable) |
                    PropertyDetails::KindField::encode(kData)));
  __ j(not_equal, &not_mutable_data);
  __ JumpIfSmi(value_reg, &fast_smi_case);
  __ bind(&fast_heapobject_case);
  __ movp(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
  __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg,
                      cell_value_reg, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  // RecordWriteField clobbers the value register, so we need to reload.
  __ movp(value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
  __ Ret();
  __ bind(&not_mutable_data);

  // Check if PropertyCell value matches the new value (relevant for Constant,
  // ConstantType and Undefined cells).
  Label not_same_value;
  __ movp(cell_value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
  __ cmpp(cell_value_reg, value_reg);
  __ j(not_equal, &not_same_value,
       FLAG_debug_code ? Label::kFar : Label::kNear);
  // Make sure the PropertyCell is not marked READ_ONLY.
  __ testl(cell_details_reg,
           Immediate(PropertyDetails::kAttributesReadOnlyMask));
  __ j(not_zero, &slow_case);
  if (FLAG_debug_code) {
    Label done;
    // This can only be true for Constant, ConstantType and Undefined cells,
    // because we never store the_hole via this stub.
    __ cmpl(cell_details_reg,
            Immediate(PropertyDetails::PropertyCellTypeField::encode(
                          PropertyCellType::kConstant) |
                      PropertyDetails::KindField::encode(kData)));
    __ j(equal, &done);
    __ cmpl(cell_details_reg,
            Immediate(PropertyDetails::PropertyCellTypeField::encode(
                          PropertyCellType::kConstantType) |
                      PropertyDetails::KindField::encode(kData)));
    __ j(equal, &done);
    __ cmpl(cell_details_reg,
            Immediate(PropertyDetails::PropertyCellTypeField::encode(
                          PropertyCellType::kUndefined) |
                      PropertyDetails::KindField::encode(kData)));
    __ Check(equal, kUnexpectedValue);
    __ bind(&done);
  }
  // Storing the same value: nothing to do.
  __ Ret();
  __ bind(&not_same_value);

  // Check if PropertyCell contains data with constant type (and is not
  // READ_ONLY).
  __ cmpl(cell_details_reg,
          Immediate(PropertyDetails::PropertyCellTypeField::encode(
                        PropertyCellType::kConstantType) |
                    PropertyDetails::KindField::encode(kData)));
  __ j(not_equal, &slow_case, Label::kNear);

  // Now either both old and new values must be SMIs or both must be heap
  // objects with same map.
  Label value_is_heap_object;
  __ JumpIfNotSmi(value_reg, &value_is_heap_object, Label::kNear);
  __ JumpIfNotSmi(cell_value_reg, &slow_case, Label::kNear);
  // Old and new values are SMIs, no need for a write barrier here.
  __ bind(&fast_smi_case);
  __ movp(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
  __ Ret();
  __ bind(&value_is_heap_object);
  __ JumpIfSmi(cell_value_reg, &slow_case, Label::kNear);
  Register cell_value_map_reg = cell_value_reg;
  __ movp(cell_value_map_reg,
          FieldOperand(cell_value_reg, HeapObject::kMapOffset));
  __ cmpp(cell_value_map_reg, FieldOperand(value_reg, HeapObject::kMapOffset));
  __ j(equal, &fast_heapobject_case);

  // Fallback to the runtime.
  __ bind(&slow_case);
  __ Integer32ToSmi(slot_reg, slot_reg);
  __ PopReturnAddressTo(kScratchRegister);
  __ Push(slot_reg);
  __ Push(value_reg);
  __ Push(kScratchRegister);
  __ TailCallRuntime(is_strict(language_mode())
                         ? Runtime::kStoreGlobalViaContext_Strict
                         : Runtime::kStoreGlobalViaContext_Sloppy);
}
4947
4948
4949static int Offset(ExternalReference ref0, ExternalReference ref1) {
4950 int64_t offset = (ref0.address() - ref1.address());
4951 // Check that fits into int.
4952 DCHECK(static_cast<int>(offset) == offset);
4953 return static_cast<int>(offset);
4954}
4955
4956
// Prepares the stack to put arguments (aligns and so on). The WIN64 calling
// convention requires the pointer to the return value slot to be put into
// rcx (rcx must be preserved until CallApiFunctionAndReturn). Saves
// context (rsi). Clobbers rax. Allocates arg_stack_space * kPointerSize
// inside the exit frame (not GCed) accessible via StackSpaceOperand.
static void PrepareCallApiFunction(MacroAssembler* masm, int arg_stack_space) {
  __ EnterApiExitFrame(arg_stack_space);
}
4965
4966
4967// Calls an API function. Allocates HandleScope, extracts returned value
4968// from handle and propagates exceptions. Clobbers r14, r15, rbx and
4969// caller-save registers. Restores context. On return removes
4970// stack_space * kPointerSize (GCed).
4971static void CallApiFunctionAndReturn(MacroAssembler* masm,
4972 Register function_address,
4973 ExternalReference thunk_ref,
4974 Register thunk_last_arg, int stack_space,
4975 Operand* stack_space_operand,
4976 Operand return_value_operand,
4977 Operand* context_restore_operand) {
4978 Label prologue;
4979 Label promote_scheduled_exception;
4980 Label delete_allocated_handles;
4981 Label leave_exit_frame;
4982 Label write_back;
4983
4984 Isolate* isolate = masm->isolate();
4985 Factory* factory = isolate->factory();
4986 ExternalReference next_address =
4987 ExternalReference::handle_scope_next_address(isolate);
4988 const int kNextOffset = 0;
4989 const int kLimitOffset = Offset(
4990 ExternalReference::handle_scope_limit_address(isolate), next_address);
4991 const int kLevelOffset = Offset(
4992 ExternalReference::handle_scope_level_address(isolate), next_address);
4993 ExternalReference scheduled_exception_address =
4994 ExternalReference::scheduled_exception_address(isolate);
4995
4996 DCHECK(rdx.is(function_address) || r8.is(function_address));
4997 // Allocate HandleScope in callee-save registers.
4998 Register prev_next_address_reg = r14;
4999 Register prev_limit_reg = rbx;
5000 Register base_reg = r15;
5001 __ Move(base_reg, next_address);
5002 __ movp(prev_next_address_reg, Operand(base_reg, kNextOffset));
5003 __ movp(prev_limit_reg, Operand(base_reg, kLimitOffset));
5004 __ addl(Operand(base_reg, kLevelOffset), Immediate(1));
5005
5006 if (FLAG_log_timer_events) {
5007 FrameScope frame(masm, StackFrame::MANUAL);
5008 __ PushSafepointRegisters();
5009 __ PrepareCallCFunction(1);
5010 __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
5011 __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
5012 1);
5013 __ PopSafepointRegisters();
5014 }
5015
5016 Label profiler_disabled;
5017 Label end_profiler_check;
5018 __ Move(rax, ExternalReference::is_profiling_address(isolate));
5019 __ cmpb(Operand(rax, 0), Immediate(0));
5020 __ j(zero, &profiler_disabled);
5021
5022 // Third parameter is the address of the actual getter function.
5023 __ Move(thunk_last_arg, function_address);
5024 __ Move(rax, thunk_ref);
5025 __ jmp(&end_profiler_check);
5026
5027 __ bind(&profiler_disabled);
5028 // Call the api function!
5029 __ Move(rax, function_address);
5030
5031 __ bind(&end_profiler_check);
5032
5033 // Call the api function!
5034 __ call(rax);
5035
5036 if (FLAG_log_timer_events) {
5037 FrameScope frame(masm, StackFrame::MANUAL);
5038 __ PushSafepointRegisters();
5039 __ PrepareCallCFunction(1);
5040 __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
5041 __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
5042 1);
5043 __ PopSafepointRegisters();
5044 }
5045
5046 // Load the value from ReturnValue
5047 __ movp(rax, return_value_operand);
5048 __ bind(&prologue);
5049
5050 // No more valid handles (the result handle was the last one). Restore
5051 // previous handle scope.
5052 __ subl(Operand(base_reg, kLevelOffset), Immediate(1));
5053 __ movp(Operand(base_reg, kNextOffset), prev_next_address_reg);
5054 __ cmpp(prev_limit_reg, Operand(base_reg, kLimitOffset));
5055 __ j(not_equal, &delete_allocated_handles);
5056
5057 // Leave the API exit frame.
5058 __ bind(&leave_exit_frame);
5059 bool restore_context = context_restore_operand != NULL;
5060 if (restore_context) {
5061 __ movp(rsi, *context_restore_operand);
5062 }
5063 if (stack_space_operand != nullptr) {
5064 __ movp(rbx, *stack_space_operand);
5065 }
5066 __ LeaveApiExitFrame(!restore_context);
5067
5068 // Check if the function scheduled an exception.
5069 __ Move(rdi, scheduled_exception_address);
5070 __ Cmp(Operand(rdi, 0), factory->the_hole_value());
5071 __ j(not_equal, &promote_scheduled_exception);
5072
5073#if DEBUG
5074 // Check if the function returned a valid JavaScript value.
5075 Label ok;
5076 Register return_value = rax;
5077 Register map = rcx;
5078
5079 __ JumpIfSmi(return_value, &ok, Label::kNear);
5080 __ movp(map, FieldOperand(return_value, HeapObject::kMapOffset));
5081
5082 __ CmpInstanceType(map, LAST_NAME_TYPE);
5083 __ j(below_equal, &ok, Label::kNear);
5084
5085 __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
5086 __ j(above_equal, &ok, Label::kNear);
5087
5088 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
5089 __ j(equal, &ok, Label::kNear);
5090
5091 __ CompareRoot(return_value, Heap::kUndefinedValueRootIndex);
5092 __ j(equal, &ok, Label::kNear);
5093
5094 __ CompareRoot(return_value, Heap::kTrueValueRootIndex);
5095 __ j(equal, &ok, Label::kNear);
5096
5097 __ CompareRoot(return_value, Heap::kFalseValueRootIndex);
5098 __ j(equal, &ok, Label::kNear);
5099
5100 __ CompareRoot(return_value, Heap::kNullValueRootIndex);
5101 __ j(equal, &ok, Label::kNear);
5102
5103 __ Abort(kAPICallReturnedInvalidObject);
5104
5105 __ bind(&ok);
5106#endif
5107
5108 if (stack_space_operand != nullptr) {
5109 DCHECK_EQ(stack_space, 0);
5110 __ PopReturnAddressTo(rcx);
5111 __ addq(rsp, rbx);
5112 __ jmp(rcx);
5113 } else {
5114 __ ret(stack_space * kPointerSize);
5115 }
5116
5117 // Re-throw by promoting a scheduled exception.
5118 __ bind(&promote_scheduled_exception);
5119 __ TailCallRuntime(Runtime::kPromoteScheduledException);
5120
5121 // HandleScope limit has changed. Delete allocated extensions.
5122 __ bind(&delete_allocated_handles);
5123 __ movp(Operand(base_reg, kLimitOffset), prev_limit_reg);
5124 __ movp(prev_limit_reg, rax);
5125 __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
5126 __ LoadAddress(rax,
5127 ExternalReference::delete_handle_scope_extensions(isolate));
5128 __ call(rax);
5129 __ movp(rax, prev_limit_reg);
5130 __ jmp(&leave_exit_frame);
5131}
5132
Ben Murdochda12d292016-06-02 14:46:10 +01005133void CallApiCallbackStub::Generate(MacroAssembler* masm) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005134 // ----------- S t a t e -------------
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005135 // -- rdi : callee
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005136 // -- rbx : call_data
5137 // -- rcx : holder
5138 // -- rdx : api_function_address
5139 // -- rsi : context
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005140 // -- rax : number of arguments if argc is a register
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005141 // -- rsp[0] : return address
5142 // -- rsp[8] : last argument
5143 // -- ...
5144 // -- rsp[argc * 8] : first argument
5145 // -- rsp[(argc + 1) * 8] : receiver
5146 // -----------------------------------
5147
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005148 Register callee = rdi;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005149 Register call_data = rbx;
5150 Register holder = rcx;
5151 Register api_function_address = rdx;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005152 Register context = rsi;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005153 Register return_address = r8;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005154
5155 typedef FunctionCallbackArguments FCA;
5156
5157 STATIC_ASSERT(FCA::kContextSaveIndex == 6);
5158 STATIC_ASSERT(FCA::kCalleeIndex == 5);
5159 STATIC_ASSERT(FCA::kDataIndex == 4);
5160 STATIC_ASSERT(FCA::kReturnValueOffset == 3);
5161 STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
5162 STATIC_ASSERT(FCA::kIsolateIndex == 1);
5163 STATIC_ASSERT(FCA::kHolderIndex == 0);
Ben Murdochc5610432016-08-08 18:44:38 +01005164 STATIC_ASSERT(FCA::kNewTargetIndex == 7);
5165 STATIC_ASSERT(FCA::kArgsLength == 8);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005166
5167 __ PopReturnAddressTo(return_address);
5168
Ben Murdochc5610432016-08-08 18:44:38 +01005169 // new target
5170 __ PushRoot(Heap::kUndefinedValueRootIndex);
5171
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005172 // context save
5173 __ Push(context);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005174
5175 // callee
5176 __ Push(callee);
5177
5178 // call data
5179 __ Push(call_data);
5180 Register scratch = call_data;
Ben Murdochda12d292016-06-02 14:46:10 +01005181 if (!this->call_data_undefined()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005182 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
5183 }
5184 // return value
5185 __ Push(scratch);
5186 // return value default
5187 __ Push(scratch);
5188 // isolate
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005189 __ Move(scratch, ExternalReference::isolate_address(masm->isolate()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005190 __ Push(scratch);
5191 // holder
5192 __ Push(holder);
5193
5194 __ movp(scratch, rsp);
5195 // Push return address back on stack.
5196 __ PushReturnAddressFrom(return_address);
5197
Ben Murdochda12d292016-06-02 14:46:10 +01005198 if (!this->is_lazy()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01005199 // load context from callee
5200 __ movp(context, FieldOperand(callee, JSFunction::kContextOffset));
5201 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005202
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005203 // Allocate the v8::Arguments structure in the arguments' space since
5204 // it's not controlled by GC.
Ben Murdochc5610432016-08-08 18:44:38 +01005205 const int kApiStackSpace = 3;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005206
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005207 PrepareCallApiFunction(masm, kApiStackSpace);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005208
5209 // FunctionCallbackInfo::implicit_args_.
Ben Murdochda12d292016-06-02 14:46:10 +01005210 int argc = this->argc();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005211 __ movp(StackSpaceOperand(0), scratch);
Ben Murdochda12d292016-06-02 14:46:10 +01005212 __ addp(scratch, Immediate((argc + FCA::kArgsLength - 1) * kPointerSize));
5213 // FunctionCallbackInfo::values_.
5214 __ movp(StackSpaceOperand(1), scratch);
5215 // FunctionCallbackInfo::length_.
5216 __ Set(StackSpaceOperand(2), argc);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005217
5218#if defined(__MINGW64__) || defined(_WIN64)
5219 Register arguments_arg = rcx;
5220 Register callback_arg = rdx;
5221#else
5222 Register arguments_arg = rdi;
5223 Register callback_arg = rsi;
5224#endif
5225
5226 // It's okay if api_function_address == callback_arg
5227 // but not arguments_arg
5228 DCHECK(!api_function_address.is(arguments_arg));
5229
5230 // v8::InvocationCallback's argument.
5231 __ leap(arguments_arg, StackSpaceOperand(0));
5232
5233 ExternalReference thunk_ref =
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005234 ExternalReference::invoke_function_callback(masm->isolate());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005235
5236 // Accessor for FunctionCallbackInfo and first js arg.
5237 StackArgumentsAccessor args_from_rbp(rbp, FCA::kArgsLength + 1,
5238 ARGUMENTS_DONT_CONTAIN_RECEIVER);
5239 Operand context_restore_operand = args_from_rbp.GetArgumentOperand(
5240 FCA::kArgsLength - FCA::kContextSaveIndex);
Ben Murdochc5610432016-08-08 18:44:38 +01005241 Operand length_operand = StackSpaceOperand(2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005242 Operand return_value_operand = args_from_rbp.GetArgumentOperand(
Ben Murdochda12d292016-06-02 14:46:10 +01005243 this->is_store() ? 0 : FCA::kArgsLength - FCA::kReturnValueOffset);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005244 int stack_space = 0;
Ben Murdochc5610432016-08-08 18:44:38 +01005245 Operand* stack_space_operand = &length_operand;
Ben Murdochda12d292016-06-02 14:46:10 +01005246 stack_space = argc + FCA::kArgsLength + 1;
5247 stack_space_operand = nullptr;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005248 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, callback_arg,
5249 stack_space, stack_space_operand,
5250 return_value_operand, &context_restore_operand);
5251}
5252
5253
// Generates the trampoline that calls a C++ AccessorInfo getter callback.
// It materializes the v8::PropertyCallbackInfo::args_ array on the JS stack
// (so the GC can see the handles), sets up the C calling-convention argument
// registers, and dispatches through the profiling-aware thunk via
// CallApiFunctionAndReturn.
void CallApiGetterStub::Generate(MacroAssembler* masm) {
#if defined(__MINGW64__) || defined(_WIN64)
  // Windows x64 ABI: first three integer arguments in rcx, rdx, r8.
  Register getter_arg = r8;
  Register accessor_info_arg = rdx;
  Register name_arg = rcx;
#else
  // System V AMD64 ABI: first three integer arguments in rdi, rsi, rdx.
  Register getter_arg = rdx;
  Register accessor_info_arg = rsi;
  Register name_arg = rdi;
#endif
  // On Win64 this aliases getter_arg (r8); the DCHECKs below show that this
  // overlap is deliberate and only accessor_info_arg/name_arg must be free.
  Register api_function_address = r8;
  Register receiver = ApiGetterDescriptor::ReceiverRegister();
  Register holder = ApiGetterDescriptor::HolderRegister();
  Register callback = ApiGetterDescriptor::CallbackRegister();
  Register scratch = rax;
  DCHECK(!AreAliased(receiver, holder, callback, scratch));

  // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
  // name below the exit frame to make GC aware of them.
  // The asserts pin the slot layout that the push sequence below must produce
  // (index 0 ends up closest to the stack top, so pushes run from kThisIndex
  // down to kShouldThrowOnErrorIndex).
  STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);

  // Insert additional parameters into the stack frame above return address.
  __ PopReturnAddressTo(scratch);
  __ Push(receiver);                                          // kThisIndex
  __ Push(FieldOperand(callback, AccessorInfo::kDataOffset));  // kDataIndex
  __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
  __ Push(kScratchRegister);  // return value
  __ Push(kScratchRegister);  // return value default
  __ PushAddress(ExternalReference::isolate_address(isolate()));
  __ Push(holder);
  __ Push(Smi::FromInt(0));  // should_throw_on_error -> false
  // The property name handle sits below args_, just above the return address.
  __ Push(FieldOperand(callback, AccessorInfo::kNameOffset));
  __ PushReturnAddressFrom(scratch);

  // v8::PropertyCallbackInfo::args_ array and name handle.
  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;

  // Allocate v8::PropertyCallbackInfo in non-GCed stack space.
  const int kArgStackSpace = 1;

  // Load address of v8::PropertyAccessorInfo::args_ array.
  // Skip the return address and the name handle (2 slots) to reach slot 0
  // (kShouldThrowOnErrorIndex) of the args_ array just pushed above.
  __ leap(scratch, Operand(rsp, 2 * kPointerSize));

  PrepareCallApiFunction(masm, kArgStackSpace);
  // Create v8::PropertyCallbackInfo object on the stack and initialize
  // its args_ field.
  Operand info_object = StackSpaceOperand(0);
  __ movp(info_object, scratch);

  // name_arg points at the pushed name handle, one slot below args_.
  __ leap(name_arg, Operand(scratch, -kPointerSize));
  // The context register (rsi) has been saved in PrepareCallApiFunction and
  // could be used to pass arguments.
  __ leap(accessor_info_arg, info_object);

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback(isolate());

  // It's okay if api_function_address == getter_arg
  // but not accessor_info_arg or name_arg
  DCHECK(!api_function_address.is(accessor_info_arg));
  DCHECK(!api_function_address.is(name_arg));
  // The getter's entry point is stored as a Foreign on the AccessorInfo.
  __ movp(scratch, FieldOperand(callback, AccessorInfo::kJsGetterOffset));
  __ movp(api_function_address,
          FieldOperand(scratch, Foreign::kForeignAddressOffset));

  // +3 is to skip prolog, return address and name handle.
  Operand return_value_operand(
      rbp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg,
                           kStackUnwindSpace, nullptr, return_value_operand,
                           NULL);
}
5333
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005334#undef __
5335
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005336} // namespace internal
5337} // namespace v8
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005338
5339#endif // V8_TARGET_ARCH_X64