blob: 5761b1627578f8136ed6f0798edf935bb2038df7 [file] [log] [blame]
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001// Copyright 2012 the V8 project authors. All rights reserved.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005#if V8_TARGET_ARCH_IA32
Kristian Monsen80d68ea2010-09-08 11:05:35 +01006
Ben Murdochda12d292016-06-02 14:46:10 +01007#include "src/code-stubs.h"
8#include "src/api-arguments.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +00009#include "src/base/bits.h"
10#include "src/bootstrapper.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000011#include "src/codegen.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000012#include "src/ia32/code-stubs-ia32.h"
13#include "src/ia32/frames-ia32.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000014#include "src/ic/handler-compiler.h"
15#include "src/ic/ic.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000016#include "src/ic/stub-cache.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000017#include "src/isolate.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000018#include "src/regexp/jsregexp.h"
19#include "src/regexp/regexp-macro-assembler.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -040020#include "src/runtime/runtime.h"
Kristian Monsen80d68ea2010-09-08 11:05:35 +010021
22namespace v8 {
23namespace internal {
24
Ben Murdoch61f157c2016-09-16 13:49:30 +010025#define __ ACCESS_MASM(masm)
Ben Murdochb8a8cc12014-11-26 15:28:44 +000026
// Tail-calls Runtime::kNewArray for the N-argument Array constructor case.
// On entry (presumably per the stub's calling convention — confirm against
// the descriptor): eax holds the argument count; edi and ebx carry values
// the runtime entry expects (likely the constructor function and the
// allocation-site feedback — TODO confirm).
void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
  // Pop the return address so we can store edi into the slot just above
  // the arguments, then rebuild the stack with three extra words.
  __ pop(ecx);
  __ mov(MemOperand(esp, eax, times_4, 0), edi);
  __ push(edi);
  __ push(ebx);
  __ push(ecx);
  // Account for the three words pushed above in the runtime argument count.
  __ add(eax, Immediate(3));
  __ TailCallRuntime(Runtime::kNewArray);
}
36
Ben Murdochda12d292016-06-02 14:46:10 +010037void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
38 Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
39 descriptor->Initialize(eax, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
40}
Ben Murdochb8a8cc12014-11-26 15:28:44 +000041
Ben Murdoch61f157c2016-09-16 13:49:30 +010042void FastFunctionBindStub::InitializeDescriptor(
Ben Murdochb8a8cc12014-11-26 15:28:44 +000043 CodeStubDescriptor* descriptor) {
Ben Murdoch61f157c2016-09-16 13:49:30 +010044 Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry;
45 descriptor->Initialize(eax, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
Ben Murdochb8a8cc12014-11-26 15:28:44 +000046}
47
// Generates the "miss" path for a Hydrogen code stub: pushes the stub's
// register parameters inside a fresh internal frame and calls the given
// external |miss| reference, then returns to the caller.
void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
                                               ExternalReference miss) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
  int param_count = descriptor.GetRegisterParameterCount();
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    // The convention checked here: the last register parameter (if any)
    // must be eax.
    DCHECK(param_count == 0 ||
           eax.is(descriptor.GetRegisterParameter(param_count - 1)));
    // Push arguments in descriptor order.
    for (int i = 0; i < param_count; ++i) {
      __ push(descriptor.GetRegisterParameter(i));
    }
    __ CallExternalReference(miss, param_count);
  }

  __ ret(0);
}
69
70
// Calls the C++ store-buffer-overflow handler, preserving all general
// purpose registers (pushad/popad) and, when save_doubles() is set, all
// XMM registers as well.
void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  __ pushad();
  if (save_doubles()) {
    // Spill every XMM register to a stack slot.
    __ sub(esp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      __ movsd(Operand(esp, i * kDoubleSize), reg);
    }
  }
  // The C function takes a single argument: the isolate address.
  const int argument_count = 1;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count, ecx);
  __ mov(Operand(esp, 0 * kPointerSize),
         Immediate(ExternalReference::isolate_address(isolate())));
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  if (save_doubles()) {
    // Restore the XMM registers in the same order they were saved.
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      __ movsd(reg, Operand(esp, i * kDoubleSize));
    }
    __ add(esp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
  }
  __ popad();
  __ ret(0);
}
102
103
// Static helpers for loading and type-checking floating point operands for
// the ia32 code stubs. All members operate on the fixed operand registers
// (eax/edx) or a caller-supplied register; see each method's comment.
class FloatingPointHelper : public AllStatic {
 public:
  enum ArgLocation {
    ARGS_ON_STACK,
    ARGS_IN_REGISTERS
  };

  // Code pattern for loading a floating point value. Input value must
  // be either a smi or a heap number object (fp value). Requirements:
  // operand in register number. Returns operand as floating point number
  // on FPU stack.
  static void LoadFloatOperand(MacroAssembler* masm, Register number);

  // Test if operands are smi or number objects (fp). Requirements:
  // operand_1 in eax, operand_2 in edx; falls through on float
  // operands, jumps to the non_float label otherwise.
  static void CheckFloatOperands(MacroAssembler* masm,
                                 Label* non_float,
                                 Register scratch);

  // Test if operands are numbers (smi or HeapNumber objects), and load
  // them into xmm0 and xmm1 if they are. Jump to label not_numbers if
  // either operand is not a number. Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm, Label* not_numbers);
};
130
131
// Truncating double-to-int32 conversion. Reads a double from memory at
// [source() + offset()] and leaves the truncated 32-bit result in
// destination(). Uses SSE3 fisttp when available; otherwise extracts the
// result by shifting the 64-bit mantissa manually.
void DoubleToIStub::Generate(MacroAssembler* masm) {
  Register input_reg = this->source();
  Register final_result_reg = this->destination();
  DCHECK(is_truncating());

  Label check_negative, process_64_bits, done, done_no_stash;

  int double_offset = offset();

  // Account for return address and saved regs if input is esp.
  if (input_reg.is(esp)) double_offset += 3 * kPointerSize;

  // The double is split as two 32-bit words: mantissa (low), exponent (high).
  MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
  MemOperand exponent_operand(MemOperand(input_reg,
                                         double_offset + kDoubleSize / 2));

  // Pick the first of ebx/edx/edi that clashes with neither the result nor
  // the input register.
  Register scratch1;
  {
    Register scratch_candidates[3] = { ebx, edx, edi };
    for (int i = 0; i < 3; i++) {
      scratch1 = scratch_candidates[i];
      if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
    }
  }
  // Since we must use ecx for shifts below, use some other register (eax)
  // to calculate the result if ecx is the requested return register.
  Register result_reg = final_result_reg.is(ecx) ? eax : final_result_reg;
  // Save ecx if it isn't the return register and therefore volatile, or if it
  // is the return register, then save the temp register we use in its stead for
  // the result.
  Register save_reg = final_result_reg.is(ecx) ? eax : ecx;
  __ push(scratch1);
  __ push(save_reg);

  // If the input is relative to esp, the pushes above already shifted it, so
  // we cannot keep an extra copy of the exponent on the stack in that case.
  bool stash_exponent_copy = !input_reg.is(esp);
  __ mov(scratch1, mantissa_operand);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    // Load x87 register with heap number.
    __ fld_d(mantissa_operand);
  }
  __ mov(ecx, exponent_operand);
  if (stash_exponent_copy) __ push(ecx);

  // Compute the unbiased exponent in result_reg.
  __ and_(ecx, HeapNumber::kExponentMask);
  __ shr(ecx, HeapNumber::kExponentShift);
  __ lea(result_reg, MemOperand(ecx, -HeapNumber::kExponentBias));
  __ cmp(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits);

  // Result is entirely in lower 32-bits of mantissa
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  if (CpuFeatures::IsSupported(SSE3)) {
    // Pop the value we loaded onto the x87 stack; it is not needed here.
    __ fstp(0);
  }
  __ sub(ecx, Immediate(delta));
  __ xor_(result_reg, result_reg);
  __ cmp(ecx, Immediate(31));
  __ j(above, &done);  // Shift count > 31: truncated result is 0.
  __ shl_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    if (stash_exponent_copy) {
      // Already a copy of the exponent on the stack, overwrite it.
      STATIC_ASSERT(kDoubleSize == 2 * kPointerSize);
      __ sub(esp, Immediate(kDoubleSize / 2));
    } else {
      // Reserve space for 64 bit answer.
      __ sub(esp, Immediate(kDoubleSize));  // Nolint.
    }
    // Do conversion, which cannot fail because we checked the exponent.
    __ fisttp_d(Operand(esp, 0));
    __ mov(result_reg, Operand(esp, 0));  // Load low word of answer as result
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&done_no_stash);
  } else {
    // Result must be extracted from shifted 32-bit mantissa
    __ sub(ecx, Immediate(delta));
    __ neg(ecx);
    if (stash_exponent_copy) {
      __ mov(result_reg, MemOperand(esp, 0));
    } else {
      __ mov(result_reg, exponent_operand);
    }
    // Isolate the high mantissa bits and restore the implicit leading 1.
    __ and_(result_reg,
            Immediate(static_cast<uint32_t>(Double::kSignificandMask >> 32)));
    __ add(result_reg,
           Immediate(static_cast<uint32_t>(Double::kHiddenBit >> 32)));
    // 64-bit right shift of result_reg:scratch1 by cl (mod 32), then select
    // the correct word depending on whether the shift count was >= 32.
    __ shrd_cl(scratch1, result_reg);
    __ shr_cl(result_reg);
    __ test(ecx, Immediate(32));
    __ cmov(not_equal, scratch1, result_reg);
  }

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ mov(result_reg, scratch1);
  __ neg(result_reg);
  if (stash_exponent_copy) {
    __ cmp(MemOperand(esp, 0), Immediate(0));
  } else {
    __ cmp(exponent_operand, Immediate(0));
  }
  // Positive sign word: keep the un-negated value from scratch1.
  __ cmov(greater, result_reg, scratch1);

  // Restore registers
  __ bind(&done);
  if (stash_exponent_copy) {
    __ add(esp, Immediate(kDoubleSize / 2));
  }
  __ bind(&done_no_stash);
  if (!final_result_reg.is(result_reg)) {
    DCHECK(final_result_reg.is(ecx));
    __ mov(final_result_reg, result_reg);
  }
  __ pop(save_reg);
  __ pop(scratch1);
  __ ret(0);
}
254
255
// Loads |number| (a smi or a HeapNumber) onto the x87 FPU stack as a double.
// Smis are untagged and converted via fild_s; heap numbers are loaded
// directly from their value field. |number| itself is left unchanged.
void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
                                           Register number) {
  Label load_smi, done;

  __ JumpIfSmi(number, &load_smi, Label::kNear);
  __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
  __ jmp(&done, Label::kNear);

  __ bind(&load_smi);
  __ SmiUntag(number);
  // fild_s needs a memory operand, so bounce the value via the stack.
  __ push(number);
  __ fild_s(Operand(esp, 0));
  __ pop(number);

  __ bind(&done);
}
272
273
// Loads the operands in edx and eax into xmm0 and xmm1 respectively,
// accepting both smis and HeapNumbers. Jumps to |not_numbers| if either
// operand is neither. edx and eax are preserved (smis are untagged only
// temporarily and retagged afterwards).
void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
                                           Label* not_numbers) {
  Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
  // Load operand in edx into xmm0, or branch to not_numbers.
  __ JumpIfSmi(edx, &load_smi_edx, Label::kNear);
  Factory* factory = masm->isolate()->factory();
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset), factory->heap_number_map());
  __ j(not_equal, not_numbers);  // Argument in edx is not a number.
  __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
  __ bind(&load_eax);
  // Load operand in eax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(eax, &load_smi_eax, Label::kNear);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset), factory->heap_number_map());
  __ j(equal, &load_float_eax, Label::kNear);
  __ jmp(not_numbers);  // Argument in eax is not a number.
  __ bind(&load_smi_edx);
  __ SmiUntag(edx);  // Untag smi before converting to float.
  __ Cvtsi2sd(xmm0, edx);
  __ SmiTag(edx);  // Retag smi for heap number overwriting test.
  __ jmp(&load_eax);
  __ bind(&load_smi_eax);
  __ SmiUntag(eax);  // Untag smi before converting to float.
  __ Cvtsi2sd(xmm1, eax);
  __ SmiTag(eax);  // Retag smi for heap number overwriting test.
  __ jmp(&done, Label::kNear);
  __ bind(&load_float_eax);
  __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
  __ bind(&done);
}
303
304
// Verifies that the operands in edx and eax are each either a smi or a
// HeapNumber. Falls through when both are numbers; jumps to |non_float|
// otherwise. |scratch| is clobbered with map words during the checks.
void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
                                             Label* non_float,
                                             Register scratch) {
  Label test_other, done;
  // Test if both operands are floats or smi -> scratch=k_is_float;
  // Otherwise scratch = k_not_float.
  __ JumpIfSmi(edx, &test_other, Label::kNear);
  __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
  Factory* factory = masm->isolate()->factory();
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // argument in edx is not a number -> NaN

  __ bind(&test_other);
  __ JumpIfSmi(eax, &done, Label::kNear);
  __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // argument in eax is not a number -> NaN

  // Fall-through: Both operands are numbers.
  __ bind(&done);
}
326
327
// Computes base^exponent. Depending on exponent_type() the inputs arrive
// tagged on the stack (ON_STACK), tagged in registers (TAGGED), or as an
// untagged integer (INTEGER). Integer exponents use square-and-multiply in
// SSE2; fractional exponents use the x87 fyl2x/f2xm1/fscale sequence with a
// runtime fallback. ON_STACK returns a freshly allocated HeapNumber in eax;
// the other modes leave the double result in xmm3.
void MathPowStub::Generate(MacroAssembler* masm) {
  Factory* factory = isolate()->factory();
  const Register exponent = MathPowTaggedDescriptor::exponent();
  DCHECK(exponent.is(eax));
  const Register base = edx;
  const Register scratch = ecx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ mov(scratch, Immediate(1));
  __ Cvtsi2sd(double_result, scratch);

  if (exponent_type() == ON_STACK) {
    Label base_is_smi, unpack_exponent;
    // The exponent and base are supplied as arguments on the stack.
    // This can only happen if the stub is called from non-optimized code.
    // Load input parameters from stack.
    __ mov(base, Operand(esp, 2 * kPointerSize));
    __ mov(exponent, Operand(esp, 1 * kPointerSize));

    __ JumpIfSmi(base, &base_is_smi, Label::kNear);
    __ cmp(FieldOperand(base, HeapObject::kMapOffset),
           factory->heap_number_map());
    __ j(not_equal, &call_runtime);

    __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset));
    __ jmp(&unpack_exponent, Label::kNear);

    __ bind(&base_is_smi);
    __ SmiUntag(base);
    __ Cvtsi2sd(double_base, base);

    __ bind(&unpack_exponent);
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiUntag(exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ cmp(FieldOperand(exponent, HeapObject::kMapOffset),
           factory->heap_number_map());
    __ j(not_equal, &call_runtime);
    __ movsd(double_exponent,
             FieldOperand(exponent, HeapNumber::kValueOffset));
  } else if (exponent_type() == TAGGED) {
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiUntag(exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ movsd(double_exponent,
             FieldOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type() != INTEGER) {
    Label fast_power, try_arithmetic_simplification;
    // If the exponent is an exact integer value, take the integer path.
    __ DoubleToI(exponent, double_exponent, double_scratch,
                 TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification,
                 &try_arithmetic_simplification,
                 &try_arithmetic_simplification);
    __ jmp(&int_exponent);

    __ bind(&try_arithmetic_simplification);
    // Skip to runtime if possibly NaN (indicated by the indefinite integer).
    __ cvttsd2si(exponent, Operand(double_exponent));
    __ cmp(exponent, Immediate(0x1));
    __ j(overflow, &call_runtime);

    if (exponent_type() == ON_STACK) {
      // Detect square root case. Crankshaft detects constant +/-0.5 at
      // compile time and uses DoMathPowHalf instead. We then skip this check
      // for non-constant cases of +/-0.5 as these hardly occur.
      Label continue_sqrt, continue_rsqrt, not_plus_half;
      // Test for 0.5.
      // Load double_scratch with 0.5 (0x3F000000 is single-precision 0.5).
      __ mov(scratch, Immediate(0x3F000000u));
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &not_plus_half, Label::kNear);

      // Calculates square root of base. Check for the special case of
      // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
      // According to IEEE-754, single-precision -Infinity has the highest
      // 9 bits set and the lowest 23 bits cleared.
      __ mov(scratch, 0xFF800000u);
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      __ ucomisd(double_base, double_scratch);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal. However, it also sets the carry flag.
      __ j(not_equal, &continue_sqrt, Label::kNear);
      __ j(carry, &continue_sqrt, Label::kNear);

      // Set result to Infinity in the special case (0 - (-Infinity)).
      __ xorps(double_result, double_result);
      __ subsd(double_result, double_scratch);
      __ jmp(&done);

      __ bind(&continue_sqrt);
      // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
      __ xorps(double_scratch, double_scratch);
      __ addsd(double_scratch, double_base);  // Convert -0 to +0.
      __ sqrtsd(double_result, double_scratch);
      __ jmp(&done);

      // Test for -0.5.
      __ bind(&not_plus_half);
      // Load double_exponent with -0.5 by substracting 1.
      __ subsd(double_scratch, double_result);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &fast_power, Label::kNear);

      // Calculates reciprocal of square root of base. Check for the special
      // case of Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
      // According to IEEE-754, single-precision -Infinity has the highest
      // 9 bits set and the lowest 23 bits cleared.
      __ mov(scratch, 0xFF800000u);
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      __ ucomisd(double_base, double_scratch);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal. However, it also sets the carry flag.
      __ j(not_equal, &continue_rsqrt, Label::kNear);
      __ j(carry, &continue_rsqrt, Label::kNear);

      // Set result to 0 in the special case.
      __ xorps(double_result, double_result);
      __ jmp(&done);

      __ bind(&continue_rsqrt);
      // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
      __ xorps(double_exponent, double_exponent);
      __ addsd(double_exponent, double_base);  // Convert -0 to +0.
      __ sqrtsd(double_exponent, double_exponent);
      __ divsd(double_result, double_exponent);
      __ jmp(&done);
    }

    // Using FPU instructions to calculate power.
    Label fast_power_failed;
    __ bind(&fast_power);
    __ fnclex();  // Clear flags to catch exceptions later.
    // Transfer (B)ase and (E)xponent onto the FPU register stack.
    __ sub(esp, Immediate(kDoubleSize));
    __ movsd(Operand(esp, 0), double_exponent);
    __ fld_d(Operand(esp, 0));  // E
    __ movsd(Operand(esp, 0), double_base);
    __ fld_d(Operand(esp, 0));  // B, E

    // Exponent is in st(1) and base is in st(0)
    // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
    // FYL2X calculates st(1) * log2(st(0))
    __ fyl2x();    // X
    __ fld(0);     // X, X
    __ frndint();  // rnd(X), X
    __ fsub(1);    // rnd(X), X-rnd(X)
    __ fxch(1);    // X - rnd(X), rnd(X)
    // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
    __ f2xm1();    // 2^(X-rnd(X)) - 1, rnd(X)
    __ fld1();     // 1, 2^(X-rnd(X)) - 1, rnd(X)
    __ faddp(1);   // 2^(X-rnd(X)), rnd(X)
    // FSCALE calculates st(0) * 2^st(1)
    __ fscale();   // 2^X, rnd(X)
    __ fstp(1);    // 2^X
    // Bail out to runtime in case of exceptions in the status word.
    __ fnstsw_ax();
    __ test_b(eax,
              Immediate(0x5F));  // We check for all but precision exception.
    __ j(not_zero, &fast_power_failed, Label::kNear);
    __ fstp_d(Operand(esp, 0));
    __ movsd(double_result, Operand(esp, 0));
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&done);

    __ bind(&fast_power_failed);
    __ fninit();
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&call_runtime);
  }

  // Calculate power with integer exponent.
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  __ mov(scratch, exponent);  // Back up exponent.
  __ movsd(double_scratch, double_base);  // Back up base.
  __ movsd(double_scratch2, double_result);  // Load double_exponent with 1.

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ test(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ neg(scratch);
  __ bind(&no_neg);

  // Square-and-multiply loop over the bits of |scratch|.
  __ j(zero, &while_false, Label::kNear);
  __ shr(scratch, 1);
  // Above condition means CF==0 && ZF==0. This means that the
  // bit that has been shifted out is 0 and the result is not 0.
  __ j(above, &while_true, Label::kNear);
  __ movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ shr(scratch, 1);
  __ mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);

  __ bind(&while_false);
  // scratch has the original value of the exponent - if the exponent is
  // negative, return 1/result.
  __ test(exponent, exponent);
  __ j(positive, &done);
  __ divsd(double_scratch2, double_result);
  __ movsd(double_result, double_scratch2);
  // Test whether result is zero. Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ xorps(double_scratch2, double_scratch2);
  __ ucomisd(double_scratch2, double_result);  // Result cannot be NaN.
  // double_exponent aliased as double_scratch2 has already been overwritten
  // and may not have contained the exponent value in the first place when the
  // exponent is a smi. We reset it with exponent value before bailing out.
  __ j(not_equal, &done);
  __ Cvtsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  if (exponent_type() == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMathPowRT);

    // The stub is called from non-optimized code, which expects the result
    // as heap number in exponent.
    __ bind(&done);
    __ AllocateHeapNumber(eax, scratch, base, &call_runtime);
    __ movsd(FieldOperand(eax, HeapNumber::kValueOffset), double_result);
    __ ret(2 * kPointerSize);
  } else {
    __ bind(&call_runtime);
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(4, scratch);
      __ movsd(Operand(esp, 0 * kDoubleSize), double_base);
      __ movsd(Operand(esp, 1 * kDoubleSize), double_exponent);
      __ CallCFunction(
          ExternalReference::power_double_double_function(isolate()), 4);
    }
    // Return value is in st(0) on ia32.
    // Store it into the (fixed) result register.
    __ sub(esp, Immediate(kDoubleSize));
    __ fstp_d(Operand(esp, 0));
    __ movsd(double_result, Operand(esp, 0));
    __ add(esp, Immediate(kDoubleSize));

    __ bind(&done);
    __ ret(0);
  }
}
594
595
// Loads the "prototype" property of a function receiver into eax, tail
// calling the LOAD_IC miss builtin if the initial map slot holds the hole
// or TryGetFunctionPrototype bails out.
void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  Label miss;
  Register receiver = LoadDescriptor::ReceiverRegister();
  // With careful management, we won't have to save slot and vector on
  // the stack. Simply handle the possibly missing case first.
  // TODO(mvstanton): this code can be more efficient.
  __ cmp(FieldOperand(receiver, JSFunction::kPrototypeOrInitialMapOffset),
         Immediate(isolate()->factory()->the_hole_value()));
  __ j(equal, &miss);
  __ TryGetFunctionPrototype(receiver, eax, ebx, &miss);
  __ ret(0);

  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
}
612
613
// Keyed load of a single character from a string receiver: returns the
// one-character string at |index| in eax via StringCharAtGenerator, or
// tail-calls the KEYED_LOAD_IC miss builtin when the receiver is not a
// string, the index is not a number, or the index is out of range.
void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
  // Return address is on the stack.
  Label miss;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register index = LoadDescriptor::NameRegister();
  Register scratch = edi;
  DCHECK(!scratch.is(receiver) && !scratch.is(index));
  Register result = eax;
  DCHECK(!result.is(scratch));
  // The result register doubles as the slot register; the vector register
  // must stay clear of our scratch.
  DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()) &&
         result.is(LoadDescriptor::SlotRegister()));

  // StringCharAtGenerator doesn't use the result register until it's passed
  // the different miss possibilities. If it did, we would have a conflict
  // when FLAG_vector_ics is true.
  StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          RECEIVER_IS_STRING);
  char_at_generator.GenerateFast(masm);
  __ ret(0);

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper);

  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}
645
646
// Executes a compiled Irregexp regular expression against a subject string.
//
// Expects four tagged arguments on the stack (see "Stack frame on entry"
// below): the JSRegExp object, the subject string, the previous index (smi),
// and the last-match-info JSArray. On a successful match it fills the
// last-match-info array with the capture registers and returns it; on a
// failed match it returns null; on anything it cannot handle inline
// (unflattened cons strings, retry/exception results, precondition failures)
// it bails out to Runtime::kRegExpExec.
//
// Register discipline is documented inline at each phase; the numbered
// comments (1)-(11) and (E) describe the string-representation dispatch.
void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Just jump directly to runtime if native RegExp is not selected at compile
  // time or if regexp entry in generated code is turned off runtime switch or
  // at compilation.
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec);
#else  // V8_INTERPRETED_REGEXP

  // Stack frame on entry.
  //  esp[0]: return address
  //  esp[4]: last_match_info (expected JSArray)
  //  esp[8]: previous index
  //  esp[12]: subject string
  //  esp[16]: JSRegExp object

  static const int kLastMatchInfoOffset = 1 * kPointerSize;
  static const int kPreviousIndexOffset = 2 * kPointerSize;
  static const int kSubjectOffset = 3 * kPointerSize;
  static const int kJSRegExpOffset = 4 * kPointerSize;

  Label runtime;
  Factory* factory = isolate()->factory();

  // Ensure that a RegExp stack is allocated.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(isolate());
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(isolate());
  __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ test(ebx, ebx);
  __ j(zero, &runtime);

  // Check that the first argument is a JSRegExp object.
  __ mov(eax, Operand(esp, kJSRegExpOffset));
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfSmi(eax, &runtime);
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
  __ j(not_equal, &runtime);

  // Check that the RegExp has been compiled (data contains a fixed array).
  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    __ test(ecx, Immediate(kSmiTagMask));
    __ Check(not_zero, kUnexpectedTypeForRegExpDataFixedArrayExpected);
    __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
    __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
  }

  // ecx: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
  __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset));
  __ cmp(ebx, Immediate(Smi::FromInt(JSRegExp::IRREGEXP)));
  __ j(not_equal, &runtime);

  // ecx: RegExp data (FixedArray)
  // Check that the number of captures fit in the static offsets vector buffer.
  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
  // Check (number_of_captures + 1) * 2 <= offsets vector size
  // Or          number_of_captures * 2 <= offsets vector size - 2
  // Multiplying by 2 comes for free since edx is smi-tagged.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
  __ cmp(edx, Isolate::kJSRegexpStaticOffsetsVectorSize - 2);
  __ j(above, &runtime);

  // Reset offset for possibly sliced string.
  __ Move(edi, Immediate(0));
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ JumpIfSmi(eax, &runtime);
  __ mov(edx, eax);  // Make a copy of the original subject string.

  // eax: subject string
  // edx: subject string
  // ecx: RegExp data (FixedArray)
  // Handle subject string according to its encoding and representation:
  // (1) Sequential two byte?  If yes, go to (9).
  // (2) Sequential one byte?  If yes, go to (5).
  // (3) Sequential or cons?  If not, go to (6).
  // (4) Cons string.  If the string is flat, replace subject with first string
  //     and go to (1). Otherwise bail out to runtime.
  // (5) One byte sequential.  Load regexp code for one byte.
  // (E) Carry on.
  /// [...]

  // Deferred code at the end of the stub:
  // (6) Long external string?  If not, go to (10).
  // (7) External string.  Make it, offset-wise, look like a sequential string.
  // (8) Is the external string one byte?  If yes, go to (5).
  // (9) Two byte sequential.  Load regexp code for two byte. Go to (E).
  // (10) Short external string or not a string?  If yes, bail out to runtime.
  // (11) Sliced string.  Replace subject with parent.  Go to (1).

  Label seq_one_byte_string /* 5 */, seq_two_byte_string /* 9 */,
      external_string /* 7 */, check_underlying /* 1 */,
      not_seq_nor_cons /* 6 */, check_code /* E */, not_long_external /* 10 */;

  __ bind(&check_underlying);
  // (1) Sequential two byte?  If yes, go to (9).
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));

  __ and_(ebx, kIsNotStringMask |
               kStringRepresentationMask |
               kStringEncodingMask |
               kShortExternalStringMask);
  STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);  // Go to (9).

  // (2) Sequential one byte?  If yes, go to (5).
  // Any other sequential string must be one byte.
  __ and_(ebx, Immediate(kIsNotStringMask |
                         kStringRepresentationMask |
                         kShortExternalStringMask));
  __ j(zero, &seq_one_byte_string, Label::kNear);  // Go to (5).

  // (3) Sequential or cons?  If not, go to (6).
  // We check whether the subject string is a cons, since sequential strings
  // have already been covered.
  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
  STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
  STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
  __ cmp(ebx, Immediate(kExternalStringTag));
  __ j(greater_equal, &not_seq_nor_cons);  // Go to (6).

  // (4) Cons string.  Check that it's flat.
  // Replace subject with first string and reload instance type.
  __ cmp(FieldOperand(eax, ConsString::kSecondOffset), factory->empty_string());
  __ j(not_equal, &runtime);
  __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
  __ jmp(&check_underlying);

  // eax: sequential subject string (or look-alike, external string)
  // edx: original subject string
  // ecx: RegExp data (FixedArray)
  // (5) One byte sequential.  Load regexp code for one byte.
  __ bind(&seq_one_byte_string);
  // Load previous index and check range before edx is overwritten.  We have
  // to use edx instead of eax here because it might have been only made to
  // look like a sequential string when it actually is an external string.
  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
  __ JumpIfNotSmi(ebx, &runtime);
  __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
  __ j(above_equal, &runtime);
  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataOneByteCodeOffset));
  __ Move(ecx, Immediate(1));  // Type is one byte.

  // (E) Carry on.  String handling is done.
  __ bind(&check_code);
  // edx: irregexp code
  // Check that the irregexp code has been generated for the actual string
  // encoding. If it has, the field contains a code object otherwise it contains
  // a smi (code flushing support).
  __ JumpIfSmi(edx, &runtime);

  // eax: subject string
  // ebx: previous index (smi)
  // edx: code
  // ecx: encoding of subject string (1 if one_byte, 0 if two_byte);
  // All checks done. Now push arguments for native regexp code.
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->regexp_entry_native(), 1);

  // Isolates: note we add an additional parameter here (isolate pointer).
  static const int kRegExpExecuteArguments = 9;
  __ EnterApiExitFrame(kRegExpExecuteArguments);

  // Argument 9: Pass current isolate address.
  __ mov(Operand(esp, 8 * kPointerSize),
         Immediate(ExternalReference::isolate_address(isolate())));

  // Argument 8: Indicate that this is a direct call from JavaScript.
  __ mov(Operand(esp, 7 * kPointerSize), Immediate(1));

  // Argument 7: Start (high end) of backtracking stack memory area.
  __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address));
  __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ mov(Operand(esp, 6 * kPointerSize), esi);

  // Argument 6: Set the number of capture registers to zero to force global
  // regexps to behave as non-global.  This does not affect non-global regexps.
  __ mov(Operand(esp, 5 * kPointerSize), Immediate(0));

  // Argument 5: static offsets vector buffer.
  __ mov(Operand(esp, 4 * kPointerSize),
         Immediate(ExternalReference::address_of_static_offsets_vector(
             isolate())));

  // Argument 2: Previous index.
  __ SmiUntag(ebx);
  __ mov(Operand(esp, 1 * kPointerSize), ebx);

  // Argument 1: Original subject string.
  // The original subject is in the previous stack frame. Therefore we have to
  // use ebp, which points exactly to one pointer size below the previous esp.
  // (Because creating a new stack frame pushes the previous ebp onto the stack
  // and thereby moves up esp by one kPointerSize.)
  __ mov(esi, Operand(ebp, kSubjectOffset + kPointerSize));
  __ mov(Operand(esp, 0 * kPointerSize), esi);

  // esi: original subject string
  // eax: underlying subject string
  // ebx: previous index
  // ecx: encoding of subject string (1 if one_byte 0 if two_byte);
  // edx: code
  // Argument 4: End of string data
  // Argument 3: Start of string data
  // Prepare start and end index of the input.
  // Load the length from the original sliced string if that is the case.
  __ mov(esi, FieldOperand(esi, String::kLengthOffset));
  __ add(esi, edi);  // Calculate input end wrt offset.
  __ SmiUntag(edi);
  __ add(ebx, edi);  // Calculate input start wrt offset.

  // ebx: start index of the input string
  // esi: end index of the input string
  Label setup_two_byte, setup_rest;
  __ test(ecx, ecx);
  __ j(zero, &setup_two_byte, Label::kNear);
  __ SmiUntag(esi);
  __ lea(ecx, FieldOperand(eax, esi, times_1, SeqOneByteString::kHeaderSize));
  __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
  __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqOneByteString::kHeaderSize));
  __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.
  __ jmp(&setup_rest, Label::kNear);

  __ bind(&setup_two_byte);
  STATIC_ASSERT(kSmiTag == 0);
  // For two-byte strings the smi tag shift already scales the (smi) end
  // index by the character size, so times_1 is used for esi below.
  STATIC_ASSERT(kSmiTagSize == 1);  // esi is smi (powered by 2).
  __ lea(ecx, FieldOperand(eax, esi, times_1, SeqTwoByteString::kHeaderSize));
  __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
  __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
  __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.

  __ bind(&setup_rest);

  // Locate the code entry and call it.
  __ add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(edx);

  // Drop arguments and come back to JS mode.
  __ LeaveApiExitFrame(true);

  // Check the result.
  Label success;
  __ cmp(eax, 1);
  // We expect exactly one result since we force the called regexp to behave
  // as non-global.
  __ j(equal, &success);
  Label failure;
  __ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
  __ j(equal, &failure);
  __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
  // If not exception it can only be retry. Handle that in the runtime system.
  __ j(not_equal, &runtime);
  // Result must now be exception. If there is no pending exception already a
  // stack overflow (on the backtrack stack) was detected in RegExp code but
  // haven't created the exception yet. Handle that in the runtime system.
  // TODO(592): Rerunning the RegExp to get the stack overflow exception.
  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
                                      isolate());
  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
  __ mov(eax, Operand::StaticVariable(pending_exception));
  __ cmp(edx, eax);
  __ j(equal, &runtime);

  // For exception, throw the exception again.
  __ TailCallRuntime(Runtime::kRegExpExecReThrow);

  __ bind(&failure);
  // For failure to match, return null.
  __ mov(eax, factory->null_value());
  __ ret(4 * kPointerSize);

  // Load RegExp data.
  __ bind(&success);
  __ mov(eax, Operand(esp, kJSRegExpOffset));
  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  __ add(edx, Immediate(2));  // edx was a smi.

  // edx: Number of capture registers
  // Load last_match_info which is still known to be a fast case JSArray.
  // Check that the fourth object is a JSArray object.
  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
  __ JumpIfSmi(eax, &runtime);
  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
  __ j(not_equal, &runtime);
  // Check that the JSArray is in fast case.
  __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
  __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
  __ cmp(eax, factory->fixed_array_map());
  __ j(not_equal, &runtime);
  // Check that the last match info has space for the capture registers and the
  // additional information.
  __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
  __ SmiUntag(eax);
  __ sub(eax, Immediate(RegExpImpl::kLastMatchOverhead));
  __ cmp(edx, eax);
  __ j(greater, &runtime);

  // ebx: last_match_info backing store (FixedArray)
  // edx: number of capture registers
  // Store the capture count.
  __ SmiTag(edx);  // Number of capture registers to smi.
  __ mov(FieldOperand(ebx, RegExpImpl::kLastCaptureCountOffset), edx);
  __ SmiUntag(edx);  // Number of capture registers back from smi.
  // Store last subject and last input.
  __ mov(eax, Operand(esp, kSubjectOffset));
  // Keep a copy in ecx: RecordWriteField below clobbers eax (the value
  // register), so the subject is restored from ecx for the second store.
  __ mov(ecx, eax);
  __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax);
  __ RecordWriteField(ebx,
                      RegExpImpl::kLastSubjectOffset,
                      eax,
                      edi,
                      kDontSaveFPRegs);
  __ mov(eax, ecx);
  __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax);
  __ RecordWriteField(ebx,
                      RegExpImpl::kLastInputOffset,
                      eax,
                      edi,
                      kDontSaveFPRegs);

  // Get the static offsets vector filled by the native regexp code.
  ExternalReference address_of_static_offsets_vector =
      ExternalReference::address_of_static_offsets_vector(isolate());
  __ mov(ecx, Immediate(address_of_static_offsets_vector));

  // ebx: last_match_info backing store (FixedArray)
  // ecx: offsets vector
  // edx: number of capture registers
  Label next_capture, done;
  // Capture register counter starts from number of capture registers and
  // counts down until wraping after zero.
  __ bind(&next_capture);
  __ sub(edx, Immediate(1));
  __ j(negative, &done, Label::kNear);
  // Read the value from the static offsets vector buffer.
  __ mov(edi, Operand(ecx, edx, times_int_size, 0));
  __ SmiTag(edi);
  // Store the smi value in the last match info.
  __ mov(FieldOperand(ebx,
                      edx,
                      times_pointer_size,
                      RegExpImpl::kFirstCaptureOffset),
         edi);
  __ jmp(&next_capture);
  __ bind(&done);

  // Return last match info.
  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
  __ ret(4 * kPointerSize);

  // Do the runtime call to execute the regexp.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kRegExpExec);

  // Deferred code for string handling.
  // (6) Long external string?  If not, go to (10).
  __ bind(&not_seq_nor_cons);
  // Compare flags are still set from (3).
  __ j(greater, &not_long_external, Label::kNear);  // Go to (10).

  // (7) External string.  Short external strings have been ruled out.
  __ bind(&external_string);
  // Reload instance type.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ test_b(ebx, Immediate(kIsIndirectStringMask));
    __ Assert(zero, kExternalStringExpectedButNotFound);
  }
  __ mov(eax, FieldOperand(eax, ExternalString::kResourceDataOffset));
  // Move the pointer so that offset-wise, it looks like a sequential string.
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ sub(eax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  STATIC_ASSERT(kTwoByteStringTag == 0);
  // (8) Is the external string one byte?  If yes, go to (5).
  __ test_b(ebx, Immediate(kStringEncodingMask));
  __ j(not_zero, &seq_one_byte_string);  // Go to (5).

  // eax: sequential subject string (or look-alike, external string)
  // edx: original subject string
  // ecx: RegExp data (FixedArray)
  // (9) Two byte sequential.  Load regexp code for two byte. Go to (E).
  __ bind(&seq_two_byte_string);
  // Load previous index and check range before edx is overwritten.  We have
  // to use edx instead of eax here because it might have been only made to
  // look like a sequential string when it actually is an external string.
  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
  __ JumpIfNotSmi(ebx, &runtime);
  __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
  __ j(above_equal, &runtime);
  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
  __ Move(ecx, Immediate(0));  // Type is two byte.
  __ jmp(&check_code);  // Go to (E).

  // (10) Not a string or a short external string?  If yes, bail out to runtime.
  __ bind(&not_long_external);
  // Catch non-string subject or short external string.
  STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag !=0);
  __ test(ebx, Immediate(kIsNotStringMask | kShortExternalStringTag));
  __ j(not_zero, &runtime);

  // (11) Sliced string.  Replace subject with parent.  Go to (1).
  // Load offset into edi and replace subject string with parent.
  __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset));
  __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset));
  __ jmp(&check_underlying);  // Go to (1).
#endif  // V8_INTERPRETED_REGEXP
}
1065
1066
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001067static int NegativeComparisonResult(Condition cc) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001068 DCHECK(cc != equal);
1069 DCHECK((cc == less) || (cc == less_equal)
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001070 || (cc == greater) || (cc == greater_equal));
1071 return (cc == greater || cc == greater_equal) ? LESS : GREATER;
1072}
1073
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001074
// Emits a type guard for one CompareIC input: for SMI state, jump to |fail|
// unless |input| is a smi; for NUMBER state, jump to |fail| unless |input|
// is a smi or a heap number. All other expected states emit no check.
static void CheckInputType(MacroAssembler* masm, Register input,
                           CompareICState::State expected, Label* fail) {
  Label ok;
  if (expected == CompareICState::SMI) {
    __ JumpIfNotSmi(input, fail);
  } else if (expected == CompareICState::NUMBER) {
    // Smis are valid numbers; only non-smis need the heap-number map check.
    __ JumpIfSmi(input, &ok);
    __ cmp(FieldOperand(input, HeapObject::kMapOffset),
           Immediate(masm->isolate()->factory()->heap_number_map()));
    __ j(not_equal, fail);
  }
  // We could be strict about internalized/non-internalized here, but as long as
  // hydrogen doesn't care, the stub doesn't have to care either.
  __ bind(&ok);
}
1090
1091
// Emits a branch to |label| unless |object| is an internalized string.
// Smis and any object whose instance type carries the not-string or
// not-internalized bit take the branch. Clobbers |scratch|.
static void BranchIfNotInternalizedString(MacroAssembler* masm,
                                          Label* label,
                                          Register object,
                                          Register scratch) {
  __ JumpIfSmi(object, label);
  __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  // Internalized strings have both tag bits clear, so a single test of the
  // combined mask suffices.
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ test(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, label);
}
1103
1104
1105void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001106 Label runtime_call, check_unequal_objects;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001107 Condition cc = GetCondition();
1108
1109 Label miss;
1110 CheckInputType(masm, edx, left(), &miss);
1111 CheckInputType(masm, eax, right(), &miss);
1112
1113 // Compare two smis.
1114 Label non_smi, smi_done;
1115 __ mov(ecx, edx);
1116 __ or_(ecx, eax);
1117 __ JumpIfNotSmi(ecx, &non_smi, Label::kNear);
1118 __ sub(edx, eax); // Return on the result of the subtraction.
1119 __ j(no_overflow, &smi_done, Label::kNear);
1120 __ not_(edx); // Correct sign in case of overflow. edx is never 0 here.
1121 __ bind(&smi_done);
1122 __ mov(eax, edx);
1123 __ ret(0);
1124 __ bind(&non_smi);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001125
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001126 // NOTICE! This code is only reached after a smi-fast-case check, so
1127 // it is certain that at least one operand isn't a smi.
1128
1129 // Identical objects can be compared fast, but there are some tricky cases
1130 // for NaN and undefined.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001131 Label generic_heap_number_comparison;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001132 {
1133 Label not_identical;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001134 __ cmp(eax, edx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001135 __ j(not_equal, &not_identical);
1136
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001137 if (cc != equal) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001138 // Check for undefined. undefined OP undefined is false even though
1139 // undefined == undefined.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001140 __ cmp(edx, isolate()->factory()->undefined_value());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001141 Label check_for_nan;
1142 __ j(not_equal, &check_for_nan, Label::kNear);
1143 __ Move(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
1144 __ ret(0);
1145 __ bind(&check_for_nan);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001146 }
1147
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001148 // Test for NaN. Compare heap numbers in a general way,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001149 // to handle NaNs correctly.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001150 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
1151 Immediate(isolate()->factory()->heap_number_map()));
1152 __ j(equal, &generic_heap_number_comparison, Label::kNear);
1153 if (cc != equal) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001154 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
1155 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001156 // Call runtime on identical JSObjects. Otherwise return equal.
Ben Murdochda12d292016-06-02 14:46:10 +01001157 __ cmpb(ecx, Immediate(FIRST_JS_RECEIVER_TYPE));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001158 __ j(above_equal, &runtime_call, Label::kFar);
1159 // Call runtime on identical symbols since we need to throw a TypeError.
Ben Murdochda12d292016-06-02 14:46:10 +01001160 __ cmpb(ecx, Immediate(SYMBOL_TYPE));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001161 __ j(equal, &runtime_call, Label::kFar);
1162 // Call runtime on identical SIMD values since we must throw a TypeError.
Ben Murdochda12d292016-06-02 14:46:10 +01001163 __ cmpb(ecx, Immediate(SIMD128_VALUE_TYPE));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001164 __ j(equal, &runtime_call, Label::kFar);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001165 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001166 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
1167 __ ret(0);
1168
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001169
1170 __ bind(&not_identical);
1171 }
1172
1173 // Strict equality can quickly decide whether objects are equal.
1174 // Non-strict object equality is slower, so it is handled later in the stub.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001175 if (cc == equal && strict()) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001176 Label slow; // Fallthrough label.
Ben Murdoch257744e2011-11-30 15:57:28 +00001177 Label not_smis;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001178 // If we're doing a strict equality comparison, we don't have to do
1179 // type conversion, so we generate code to do fast comparison for objects
1180 // and oddballs. Non-smi numbers and strings still go through the usual
1181 // slow-case code.
1182 // If either is a Smi (we know that not both are), then they can only
1183 // be equal if the other is a HeapNumber. If so, use the slow case.
1184 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001185 DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001186 __ mov(ecx, Immediate(kSmiTagMask));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001187 __ and_(ecx, eax);
1188 __ test(ecx, edx);
Ben Murdoch257744e2011-11-30 15:57:28 +00001189 __ j(not_zero, &not_smis, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001190 // One operand is a smi.
1191
1192 // Check whether the non-smi is a heap number.
1193 STATIC_ASSERT(kSmiTagMask == 1);
1194 // ecx still holds eax & kSmiTag, which is either zero or one.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001195 __ sub(ecx, Immediate(0x01));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001196 __ mov(ebx, edx);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001197 __ xor_(ebx, eax);
1198 __ and_(ebx, ecx); // ebx holds either 0 or eax ^ edx.
1199 __ xor_(ebx, eax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001200 // if eax was smi, ebx is now edx, else eax.
1201
1202 // Check if the non-smi operand is a heap number.
1203 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001204 Immediate(isolate()->factory()->heap_number_map()));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001205 // If heap number, handle it in the slow case.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001206 __ j(equal, &slow, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001207 // Return non-equal (ebx is not zero)
1208 __ mov(eax, ebx);
1209 __ ret(0);
1210
1211 __ bind(&not_smis);
1212 // If either operand is a JSObject or an oddball value, then they are not
1213 // equal since their pointers are different
1214 // There is no test for undetectability in strict equality.
1215
1216 // Get the type of the first operand.
1217 // If the first object is a JS object, we have done pointer comparison.
Ben Murdoch257744e2011-11-30 15:57:28 +00001218 Label first_non_object;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001219 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
1220 __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
Ben Murdoch257744e2011-11-30 15:57:28 +00001221 __ j(below, &first_non_object, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001222
1223 // Return non-zero (eax is not zero)
Ben Murdoch257744e2011-11-30 15:57:28 +00001224 Label return_not_equal;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001225 STATIC_ASSERT(kHeapObjectTag != 0);
1226 __ bind(&return_not_equal);
1227 __ ret(0);
1228
1229 __ bind(&first_non_object);
1230 // Check for oddballs: true, false, null, undefined.
1231 __ CmpInstanceType(ecx, ODDBALL_TYPE);
1232 __ j(equal, &return_not_equal);
1233
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001234 __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ecx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001235 __ j(above_equal, &return_not_equal);
1236
1237 // Check for oddballs: true, false, null, undefined.
1238 __ CmpInstanceType(ecx, ODDBALL_TYPE);
1239 __ j(equal, &return_not_equal);
1240
1241 // Fall through to the general case.
1242 __ bind(&slow);
1243 }
1244
1245 // Generate the number comparison code.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001246 Label non_number_comparison;
1247 Label unordered;
1248 __ bind(&generic_heap_number_comparison);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001249
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001250 FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison);
1251 __ ucomisd(xmm0, xmm1);
1252 // Don't base result on EFLAGS when a NaN is involved.
1253 __ j(parity_even, &unordered, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001254
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001255 __ mov(eax, 0); // equal
1256 __ mov(ecx, Immediate(Smi::FromInt(1)));
1257 __ cmov(above, eax, ecx);
1258 __ mov(ecx, Immediate(Smi::FromInt(-1)));
1259 __ cmov(below, eax, ecx);
1260 __ ret(0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001261
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001262 // If one of the numbers was NaN, then the result is always false.
1263 // The cc is never not-equal.
1264 __ bind(&unordered);
1265 DCHECK(cc != not_equal);
1266 if (cc == less || cc == less_equal) {
1267 __ mov(eax, Immediate(Smi::FromInt(1)));
1268 } else {
1269 __ mov(eax, Immediate(Smi::FromInt(-1)));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001270 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001271 __ ret(0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001272
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001273 // The number comparison code did not provide a valid result.
1274 __ bind(&non_number_comparison);
1275
1276 // Fast negative check for internalized-to-internalized equality.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001277 Label check_for_strings;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001278 if (cc == equal) {
1279 BranchIfNotInternalizedString(masm, &check_for_strings, eax, ecx);
1280 BranchIfNotInternalizedString(masm, &check_for_strings, edx, ecx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001281
1282 // We've already checked for object identity, so if both operands
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001283 // are internalized they aren't equal. Register eax already holds a
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001284 // non-zero value, which indicates not equal, so just return.
1285 __ ret(0);
1286 }
1287
1288 __ bind(&check_for_strings);
1289
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001290 __ JumpIfNotBothSequentialOneByteStrings(edx, eax, ecx, ebx,
1291 &check_unequal_objects);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001292
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001293 // Inline comparison of one-byte strings.
1294 if (cc == equal) {
1295 StringHelper::GenerateFlatOneByteStringEquals(masm, edx, eax, ecx, ebx);
Ben Murdoch257744e2011-11-30 15:57:28 +00001296 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001297 StringHelper::GenerateCompareFlatOneByteStrings(masm, edx, eax, ecx, ebx,
1298 edi);
Ben Murdoch257744e2011-11-30 15:57:28 +00001299 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001300#ifdef DEBUG
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001301 __ Abort(kUnexpectedFallThroughFromStringComparison);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001302#endif
1303
1304 __ bind(&check_unequal_objects);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001305 if (cc == equal && !strict()) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001306 // Non-strict equality. Objects are unequal if
1307 // they are both JSObjects and not undetectable,
1308 // and their pointers are different.
Ben Murdochda12d292016-06-02 14:46:10 +01001309 Label return_equal, return_unequal, undetectable;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001310 // At most one is a smi, so we can test for smi by adding the two.
1311 // A smi plus a heap object has the low bit set, a heap object plus
1312 // a heap object has the low bit clear.
1313 STATIC_ASSERT(kSmiTag == 0);
1314 STATIC_ASSERT(kSmiTagMask == 1);
1315 __ lea(ecx, Operand(eax, edx, times_1, 0));
1316 __ test(ecx, Immediate(kSmiTagMask));
Ben Murdochda12d292016-06-02 14:46:10 +01001317 __ j(not_zero, &runtime_call);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001318
1319 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
1320 __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
1321
1322 __ test_b(FieldOperand(ebx, Map::kBitFieldOffset),
Ben Murdochda12d292016-06-02 14:46:10 +01001323 Immediate(1 << Map::kIsUndetectable));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001324 __ j(not_zero, &undetectable, Label::kNear);
1325 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
Ben Murdochda12d292016-06-02 14:46:10 +01001326 Immediate(1 << Map::kIsUndetectable));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001327 __ j(not_zero, &return_unequal, Label::kNear);
1328
1329 __ CmpInstanceType(ebx, FIRST_JS_RECEIVER_TYPE);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001330 __ j(below, &runtime_call, Label::kNear);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001331 __ CmpInstanceType(ecx, FIRST_JS_RECEIVER_TYPE);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001332 __ j(below, &runtime_call, Label::kNear);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001333
1334 __ bind(&return_unequal);
1335 // Return non-equal by returning the non-zero object pointer in eax.
1336 __ ret(0); // eax, edx were pushed
1337
1338 __ bind(&undetectable);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001339 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
Ben Murdochda12d292016-06-02 14:46:10 +01001340 Immediate(1 << Map::kIsUndetectable));
Ben Murdoch257744e2011-11-30 15:57:28 +00001341 __ j(zero, &return_unequal, Label::kNear);
Ben Murdochda12d292016-06-02 14:46:10 +01001342
1343 // If both sides are JSReceivers, then the result is false according to
1344 // the HTML specification, which says that only comparisons with null or
1345 // undefined are affected by special casing for document.all.
1346 __ CmpInstanceType(ebx, ODDBALL_TYPE);
1347 __ j(zero, &return_equal, Label::kNear);
1348 __ CmpInstanceType(ecx, ODDBALL_TYPE);
1349 __ j(not_zero, &return_unequal, Label::kNear);
1350
1351 __ bind(&return_equal);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001352 __ Move(eax, Immediate(EQUAL));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001353 __ ret(0); // eax, edx were pushed
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001354 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001355 __ bind(&runtime_call);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001356
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001357 if (cc == equal) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001358 {
1359 FrameScope scope(masm, StackFrame::INTERNAL);
1360 __ Push(edx);
1361 __ Push(eax);
1362 __ CallRuntime(strict() ? Runtime::kStrictEqual : Runtime::kEqual);
1363 }
1364 // Turn true into 0 and false into some non-zero value.
1365 STATIC_ASSERT(EQUAL == 0);
1366 __ sub(eax, Immediate(isolate()->factory()->true_value()));
1367 __ Ret();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001368 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001369 // Push arguments below the return address.
1370 __ pop(ecx);
1371 __ push(edx);
1372 __ push(eax);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001373 __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001374 __ push(ecx);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001375 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
1376 // tagged as a small integer.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001377 __ TailCallRuntime(Runtime::kCompare);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001378 }
1379
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001380 __ bind(&miss);
1381 GenerateMiss(masm);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001382}
1383
1384
// Emits code that calls |stub| while preserving the four registers that make
// up the call-target recording state (eax, edi, edx, ebx).  The registers are
// saved on the stack around the stub call and restored afterwards, so callers
// may rely on all four being intact.
static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
  // eax : number of arguments to the construct function
  // ebx : feedback vector
  // edx : slot in feedback vector (Smi)
  // edi : the function to call

  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Number-of-arguments register must be smi-tagged to call out.
    __ SmiTag(eax);
    // Save the recording state across the stub call.
    __ push(eax);
    __ push(edi);
    __ push(edx);
    __ push(ebx);

    __ CallStub(stub);

    // Restore in reverse push order, then undo the smi-tagging of eax.
    __ pop(ebx);
    __ pop(edx);
    __ pop(edi);
    __ pop(eax);
    __ SmiUntag(eax);
  }
}
1410
1411
// Emits code that records the called function in the feedback vector slot.
// The slot holds one of: the uninitialized sentinel, a WeakCell wrapping a
// JSFunction (monomorphic), an AllocationSite (monomorphic Array call), or
// the megamorphic sentinel.  The adjacent slot (offset + kPointerSize) holds
// a smi call counter, initialized to 1 on first record and incremented on
// each subsequent monomorphic hit.
static void GenerateRecordCallTarget(MacroAssembler* masm) {
  // Cache the called function in a feedback vector slot. Cache states
  // are uninitialized, monomorphic (indicated by a JSFunction), and
  // megamorphic.
  // eax : number of arguments to the construct function
  // ebx : feedback vector
  // edx : slot in feedback vector (Smi)
  // edi : the function to call
  Isolate* isolate = masm->isolate();
  Label initialize, done, miss, megamorphic, not_array_function;
  Label done_increment_count, done_initialize_count;

  // Load the cache state into ecx.
  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
                           FixedArray::kHeaderSize));

  // A monomorphic cache hit or an already megamorphic state: invoke the
  // function without changing the state.
  // We don't know if ecx is a WeakCell or a Symbol, but it's harmless to read
  // at this position in a symbol (see static asserts in
  // type-feedback-vector.h).
  Label check_allocation_site;
  __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset));
  __ j(equal, &done_increment_count, Label::kFar);
  __ CompareRoot(ecx, Heap::kmegamorphic_symbolRootIndex);
  __ j(equal, &done, Label::kFar);
  __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
                 Heap::kWeakCellMapRootIndex);
  __ j(not_equal, &check_allocation_site);

  // If the weak cell is cleared, we have a new chance to become monomorphic.
  __ JumpIfSmi(FieldOperand(ecx, WeakCell::kValueOffset), &initialize);
  __ jmp(&megamorphic);

  __ bind(&check_allocation_site);
  // If we came here, we need to see if we are the array function.
  // If we didn't have a matching function, and we didn't find the megamorph
  // sentinel, then we have in the slot either some other function or an
  // AllocationSite.
  __ CompareRoot(FieldOperand(ecx, 0), Heap::kAllocationSiteMapRootIndex);
  __ j(not_equal, &miss);

  // Make sure the function is the Array() function
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
  __ cmp(edi, ecx);
  __ j(not_equal, &megamorphic);
  // An AllocationSite in the slot with edi being the Array function counts
  // as a monomorphic hit.
  __ jmp(&done_increment_count, Label::kFar);

  __ bind(&miss);

  // A monomorphic miss (i.e, here the cache is not uninitialized) goes
  // megamorphic.
  __ CompareRoot(ecx, Heap::kuninitialized_symbolRootIndex);
  __ j(equal, &initialize);
  // MegamorphicSentinel is an immortal immovable object (undefined) so no
  // write-barrier is needed.
  __ bind(&megamorphic);
  __ mov(
      FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize),
      Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
  __ jmp(&done, Label::kFar);

  // An uninitialized cache is patched with the function or sentinel to
  // indicate the ElementsKind if function is the Array constructor.
  __ bind(&initialize);
  // Make sure the function is the Array() function
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
  __ cmp(edi, ecx);
  __ j(not_equal, &not_array_function);

  // The target function is the Array constructor,
  // Create an AllocationSite if we don't already have it, store it in the
  // slot.
  CreateAllocationSiteStub create_stub(isolate);
  CallStubInRecordCallTarget(masm, &create_stub);
  __ jmp(&done_initialize_count);

  __ bind(&not_array_function);
  // Any other function: wrap it in a WeakCell and store that in the slot.
  CreateWeakCellStub weak_cell_stub(isolate);
  CallStubInRecordCallTarget(masm, &weak_cell_stub);
  __ bind(&done_initialize_count);

  // Initialize the call counter.
  __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
                      FixedArray::kHeaderSize + kPointerSize),
         Immediate(Smi::FromInt(1)));
  __ jmp(&done);

  __ bind(&done_increment_count);
  // Increment the call count for monomorphic function calls.
  __ add(FieldOperand(ebx, edx, times_half_pointer_size,
                      FixedArray::kHeaderSize + kPointerSize),
         Immediate(Smi::FromInt(1)));

  __ bind(&done);
}
1508
1509
// Emits the CallConstruct stub: records the call target in the feedback
// vector, then tail-calls the function-specific construct stub.  Non-function
// callees are routed to the generic Construct builtin instead.
void CallConstructStub::Generate(MacroAssembler* masm) {
  // eax : number of arguments
  // ebx : feedback vector
  // edx : slot in feedback vector (Smi, for RecordCallTarget)
  // edi : constructor function

  Label non_function;
  // Check that function is not a smi.
  __ JumpIfSmi(edi, &non_function);
  // Check that function is a JSFunction.
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
  __ j(not_equal, &non_function);

  GenerateRecordCallTarget(masm);

  Label feedback_register_initialized;
  // Put the AllocationSite from the feedback vector into ebx, or undefined.
  __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
                           FixedArray::kHeaderSize));
  Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
  __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
  __ j(equal, &feedback_register_initialized);
  __ mov(ebx, isolate()->factory()->undefined_value());
  __ bind(&feedback_register_initialized);

  __ AssertUndefinedOrAllocationSite(ebx);

  // Pass new target to construct stub.
  __ mov(edx, edi);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset));
  __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
  __ jmp(ecx);

  // Smi or non-JSFunction callee: let the Construct builtin handle it
  // (it performs its own checks and throws where appropriate).
  __ bind(&non_function);
  __ mov(edx, edi);
  __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
1551
1552
// Emits the fast path for a monomorphic Array() call recorded via an
// AllocationSite: verifies edi really is the Array function (else jumps to
// |miss|), bumps the call counter, and tail-calls ArrayConstructorStub.
void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
  // edi - function
  // edx - slot id
  // ebx - vector
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
  __ cmp(edi, ecx);
  __ j(not_equal, miss);

  __ mov(eax, arg_count());
  // Reload ecx.
  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
                           FixedArray::kHeaderSize));

  // Increment the call count for monomorphic function calls.
  __ add(FieldOperand(ebx, edx, times_half_pointer_size,
                      FixedArray::kHeaderSize + kPointerSize),
         Immediate(Smi::FromInt(1)));

  // ArrayConstructorStub expects the AllocationSite in ebx and the
  // new target in edx.
  __ mov(ebx, ecx);
  __ mov(edx, edi);
  ArrayConstructorStub stub(masm->isolate(), arg_count());
  __ TailCallStub(&stub);

  // Unreachable.
}
1578
1579
1580void CallICStub::Generate(MacroAssembler* masm) {
1581 // edi - function
1582 // edx - slot id
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001583 // ebx - vector
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001584 Isolate* isolate = masm->isolate();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001585 Label extra_checks_or_miss, call, call_function;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001586 int argc = arg_count();
1587 ParameterCount actual(argc);
1588
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001589 // The checks. First, does edi match the recorded monomorphic target?
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001590 __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
1591 FixedArray::kHeaderSize));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001592
1593 // We don't know that we have a weak cell. We might have a private symbol
1594 // or an AllocationSite, but the memory is safe to examine.
1595 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
1596 // FixedArray.
1597 // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
1598 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
1599 // computed, meaning that it can't appear to be a pointer. If the low bit is
1600 // 0, then hash is computed, but the 0 bit prevents the field from appearing
1601 // to be a pointer.
1602 STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
1603 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
1604 WeakCell::kValueOffset &&
1605 WeakCell::kValueOffset == Symbol::kHashFieldSlot);
1606
1607 __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset));
1608 __ j(not_equal, &extra_checks_or_miss);
1609
1610 // The compare above could have been a SMI/SMI comparison. Guard against this
1611 // convincing us that we have a monomorphic JSFunction.
1612 __ JumpIfSmi(edi, &extra_checks_or_miss);
1613
1614 // Increment the call count for monomorphic function calls.
1615 __ add(FieldOperand(ebx, edx, times_half_pointer_size,
1616 FixedArray::kHeaderSize + kPointerSize),
Ben Murdoch61f157c2016-09-16 13:49:30 +01001617 Immediate(Smi::FromInt(1)));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001618
1619 __ bind(&call_function);
1620 __ Set(eax, argc);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001621 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
1622 tail_call_mode()),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001623 RelocInfo::CODE_TARGET);
1624
1625 __ bind(&extra_checks_or_miss);
1626 Label uninitialized, miss, not_allocation_site;
1627
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001628 __ cmp(ecx, Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001629 __ j(equal, &call);
1630
1631 // Check if we have an allocation site.
1632 __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
1633 Heap::kAllocationSiteMapRootIndex);
1634 __ j(not_equal, &not_allocation_site);
1635
1636 // We have an allocation site.
1637 HandleArrayCase(masm, &miss);
1638
1639 __ bind(&not_allocation_site);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001640
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001641 // The following cases attempt to handle MISS cases without going to the
1642 // runtime.
1643 if (FLAG_trace_ic) {
1644 __ jmp(&miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001645 }
1646
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001647 __ cmp(ecx, Immediate(TypeFeedbackVector::UninitializedSentinel(isolate)));
1648 __ j(equal, &uninitialized);
1649
1650 // We are going megamorphic. If the feedback is a JSFunction, it is fine
1651 // to handle it here. More complex cases are dealt with in the runtime.
1652 __ AssertNotSmi(ecx);
1653 __ CmpObjectType(ecx, JS_FUNCTION_TYPE, ecx);
1654 __ j(not_equal, &miss);
1655 __ mov(
1656 FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize),
1657 Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001658
1659 __ bind(&call);
1660 __ Set(eax, argc);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001661 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001662 RelocInfo::CODE_TARGET);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001663
1664 __ bind(&uninitialized);
1665
1666 // We are going monomorphic, provided we actually have a JSFunction.
1667 __ JumpIfSmi(edi, &miss);
1668
1669 // Goto miss case if we do not have a function.
1670 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
1671 __ j(not_equal, &miss);
1672
1673 // Make sure the function is not the Array() function, which requires special
1674 // behavior on MISS.
1675 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
1676 __ cmp(edi, ecx);
1677 __ j(equal, &miss);
1678
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001679 // Make sure the function belongs to the same native context.
1680 __ mov(ecx, FieldOperand(edi, JSFunction::kContextOffset));
1681 __ mov(ecx, ContextOperand(ecx, Context::NATIVE_CONTEXT_INDEX));
1682 __ cmp(ecx, NativeContextOperand());
1683 __ j(not_equal, &miss);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001684
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001685 // Initialize the call counter.
1686 __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
1687 FixedArray::kHeaderSize + kPointerSize),
Ben Murdoch61f157c2016-09-16 13:49:30 +01001688 Immediate(Smi::FromInt(1)));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001689
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001690 // Store the function. Use a stub since we need a frame for allocation.
1691 // ebx - vector
1692 // edx - slot
1693 // edi - function
1694 {
1695 FrameScope scope(masm, StackFrame::INTERNAL);
1696 CreateWeakCellStub create_stub(isolate);
1697 __ push(edi);
1698 __ CallStub(&create_stub);
1699 __ pop(edi);
1700 }
1701
1702 __ jmp(&call_function);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001703
1704 // We are here because tracing is on or we encountered a MISS case we can't
1705 // handle here.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001706 __ bind(&miss);
1707 GenerateMiss(masm);
1708
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001709 __ jmp(&call);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001710
1711 // Unreachable
1712 __ int3();
1713}
1714
1715
// Emits the CallIC miss handler: calls Runtime::kCallIC_Miss with the
// function, vector and slot, and leaves the runtime's result (the function
// to call) in edi for the caller's code to continue with.
void CallICStub::GenerateMiss(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);

  // Push the function and feedback info.
  __ push(edi);
  __ push(ebx);
  __ push(edx);

  // Call the entry.
  __ CallRuntime(Runtime::kCallIC_Miss);

  // Move result to edi and exit the internal frame.
  __ mov(edi, eax);
}
1730
1731
Steve Block44f0eee2011-05-26 01:26:41 +01001732bool CEntryStub::NeedsImmovableCode() {
1733 return false;
1734}
1735
1736
// Pre-generates the set of stubs that must exist before general code
// generation runs.  The order of the calls below is deliberate — do not
// reorder without checking the dependencies noted inline.
void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  CEntryStub::GenerateAheadOfTime(isolate);
  StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
  StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
  // It is important that the store buffer overflow stubs are generated first.
  CommonArrayConstructorStub::GenerateStubsAheadOfTime(isolate);
  CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
  CreateWeakCellStub::GenerateAheadOfTime(isolate);
  BinaryOpICStub::GenerateAheadOfTime(isolate);
  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
  StoreFastElementStub::GenerateAheadOfTime(isolate);
  TypeofStub::GenerateAheadOfTime(isolate);
}
1750
1751
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001752void CodeStub::GenerateFPStubs(Isolate* isolate) {
1753 // Generate if not already in cache.
1754 CEntryStub(isolate, 1, kSaveFPRegs).GetCode();
1755 isolate->set_fp_stubs_generated(true);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001756}
1757
1758
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001759void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
1760 CEntryStub stub(isolate, 1, kDontSaveFPRegs);
1761 stub.GetCode();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001762}
1763
1764
// Emits the trampoline that transitions from JavaScript into a C++ runtime
// function and back. On the normal path it enters an exit frame, marshals
// argc/argv/isolate as C arguments, calls the C function in ebx, checks the
// result against the exception sentinel, and returns. On the exception path
// it asks the runtime to locate the handler and jumps straight to it.
void CEntryStub::Generate(MacroAssembler* masm) {
  // Register state on entry:
  // eax: number of arguments including receiver
  // ebx: pointer to C function (C callee-saved)
  // ebp: frame pointer (restored after C call)
  // esp: stack pointer (restored after C call)
  // esi: current context (C callee-saved)
  // edi: JS function of the caller (C callee-saved)
  //
  // If argv_in_register():
  // ecx: pointer to the first argument

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Reserve space on the stack for the three arguments passed to the call. If
  // result size is greater than can be returned in registers, also reserve
  // space for the hidden argument for the result location, and space for the
  // result itself.
  int arg_stack_space = result_size() < 3 ? 3 : 4 + result_size();

  // Enter the exit frame that transitions from JavaScript to C++.
  if (argv_in_register()) {
    // The API variant never saves double registers.
    DCHECK(!save_doubles());
    __ EnterApiExitFrame(arg_stack_space);

    // Move argc and argv into the correct registers.
    __ mov(esi, ecx);
    __ mov(edi, eax);
  } else {
    __ EnterExitFrame(arg_stack_space, save_doubles());
  }

  // Register state after entering the exit frame:
  // ebx: pointer to C function (C callee-saved)
  // ebp: frame pointer (restored after C call)
  // esp: stack pointer (restored after C call)
  // edi: number of arguments including receiver (C callee-saved)
  // esi: pointer to the first argument (C callee-saved)

  // Result returned in eax, or eax+edx if result size is 2.

  // Check stack alignment (debug builds only).
  if (FLAG_debug_code) {
    __ CheckStackAlignment();
  }
  // Call C function: store the arguments into the reserved stack slots.
  if (result_size() <= 2) {
    __ mov(Operand(esp, 0 * kPointerSize), edi);  // argc.
    __ mov(Operand(esp, 1 * kPointerSize), esi);  // argv.
    __ mov(Operand(esp, 2 * kPointerSize),
           Immediate(ExternalReference::isolate_address(isolate())));
  } else {
    DCHECK_EQ(3, result_size());
    // Pass a pointer to the result location as the first (hidden) argument;
    // the remaining arguments shift up by one slot.
    __ lea(eax, Operand(esp, 4 * kPointerSize));
    __ mov(Operand(esp, 0 * kPointerSize), eax);
    __ mov(Operand(esp, 1 * kPointerSize), edi);  // argc.
    __ mov(Operand(esp, 2 * kPointerSize), esi);  // argv.
    __ mov(Operand(esp, 3 * kPointerSize),
           Immediate(ExternalReference::isolate_address(isolate())));
  }
  __ call(ebx);

  if (result_size() > 2) {
    DCHECK_EQ(3, result_size());
#ifndef _WIN32
    // Restore the "hidden" argument on the stack which was popped by caller.
    __ sub(esp, Immediate(kPointerSize));
#endif
    // Read result values stored on stack. Result is stored above the arguments.
    __ mov(kReturnRegister0, Operand(esp, 4 * kPointerSize));
    __ mov(kReturnRegister1, Operand(esp, 5 * kPointerSize));
    __ mov(kReturnRegister2, Operand(esp, 6 * kPointerSize));
  }
  // Result is in eax, edx:eax or edi:edx:eax - do not destroy these registers!

  // Check result for exception sentinel.
  Label exception_returned;
  __ cmp(eax, isolate()->factory()->exception());
  __ j(equal, &exception_returned);

  // Check that there is no pending exception, otherwise we
  // should have returned the exception sentinel (debug builds only).
  if (FLAG_debug_code) {
    __ push(edx);
    __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
    Label okay;
    ExternalReference pending_exception_address(
        Isolate::kPendingExceptionAddress, isolate());
    __ cmp(edx, Operand::StaticVariable(pending_exception_address));
    // Cannot use check here as it attempts to generate call into runtime.
    __ j(equal, &okay, Label::kNear);
    __ int3();
    __ bind(&okay);
    __ pop(edx);
  }

  // Exit the JavaScript to C++ exit frame. The argv slots are popped by this
  // stub only when argv was passed on the stack.
  __ LeaveExitFrame(save_doubles(), !argv_in_register());
  __ ret(0);

  // Handling of exception.
  __ bind(&exception_returned);

  // Runtime-maintained slots describing the handler to unwind to.
  ExternalReference pending_handler_context_address(
      Isolate::kPendingHandlerContextAddress, isolate());
  ExternalReference pending_handler_code_address(
      Isolate::kPendingHandlerCodeAddress, isolate());
  ExternalReference pending_handler_offset_address(
      Isolate::kPendingHandlerOffsetAddress, isolate());
  ExternalReference pending_handler_fp_address(
      Isolate::kPendingHandlerFPAddress, isolate());
  ExternalReference pending_handler_sp_address(
      Isolate::kPendingHandlerSPAddress, isolate());

  // Ask the runtime for help to determine the handler. This will set eax to
  // contain the current pending exception, don't clobber it.
  ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler,
                                 isolate());
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(3, eax);
    __ mov(Operand(esp, 0 * kPointerSize), Immediate(0));  // argc.
    __ mov(Operand(esp, 1 * kPointerSize), Immediate(0));  // argv.
    __ mov(Operand(esp, 2 * kPointerSize),
           Immediate(ExternalReference::isolate_address(isolate())));
    __ CallCFunction(find_handler, 3);
  }

  // Retrieve the handler context, SP and FP.
  __ mov(esi, Operand::StaticVariable(pending_handler_context_address));
  __ mov(esp, Operand::StaticVariable(pending_handler_sp_address));
  __ mov(ebp, Operand::StaticVariable(pending_handler_fp_address));

  // If the handler is a JS frame, restore the context to the frame. Note that
  // the context will be set to (esi == 0) for non-JS frames.
  Label skip;
  __ test(esi, esi);
  __ j(zero, &skip, Label::kNear);
  __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  __ bind(&skip);

  // Compute the handler entry address (code start + offset) and jump to it.
  __ mov(edi, Operand::StaticVariable(pending_handler_code_address));
  __ mov(edx, Operand::StaticVariable(pending_handler_offset_address));
  __ lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
  __ jmp(edi);
}
1911
1912
// Emits the entry trampoline that transitions from C++ into JavaScript:
// builds an entry frame, links a stack handler for uncaught exceptions,
// tail-calls through the JS entry (or construct entry) trampoline builtin,
// and tears everything down again on the way out.
void JSEntryStub::Generate(MacroAssembler* masm) {
  Label invoke, handler_entry, exit;
  Label not_outermost_js, not_outermost_js_2;

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Set up frame.
  __ push(ebp);
  __ mov(ebp, esp);

  // Push marker in two places: the frame-type marker and the saved context.
  int marker = type();
  __ push(Immediate(Smi::FromInt(marker)));  // marker
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  __ push(Operand::StaticVariable(context_address));  // context
  // Save callee-saved registers (C calling conventions).
  __ push(edi);
  __ push(esi);
  __ push(ebx);

  // Save copies of the top frame descriptor on the stack.
  ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
  __ push(Operand::StaticVariable(c_entry_fp));

  // If this is the outermost JS call, set js_entry_sp value.
  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
  __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
  __ j(not_equal, &not_outermost_js, Label::kNear);
  __ mov(Operand::StaticVariable(js_entry_sp), ebp);
  __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
  __ jmp(&invoke, Label::kNear);
  __ bind(&not_outermost_js);
  __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));

  // Jump to a faked try block that does the invoke, with a faked catch
  // block that sets the pending exception.
  __ jmp(&invoke);
  __ bind(&handler_entry);
  // Remember where the handler code starts; the runtime unwinder jumps here.
  handler_offset_ = handler_entry.pos();
  // Caught exception: Store result (exception) in the pending exception
  // field in the JSEnv and return a failure sentinel.
  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
                                      isolate());
  __ mov(Operand::StaticVariable(pending_exception), eax);
  __ mov(eax, Immediate(isolate()->factory()->exception()));
  __ jmp(&exit);

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  __ PushStackHandler();

  // Clear any pending exceptions by storing the hole value.
  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
  __ mov(Operand::StaticVariable(pending_exception), edx);

  // Fake a receiver (NULL).
  __ push(Immediate(0));  // receiver

  // Invoke the function by calling through JS entry trampoline builtin and
  // pop the faked function when we return. Notice that we cannot store a
  // reference to the trampoline code directly in this stub, because the
  // builtin stubs may not have been generated yet.
  if (type() == StackFrame::ENTRY_CONSTRUCT) {
    ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
                                      isolate());
    __ mov(edx, Immediate(construct_entry));
  } else {
    ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
    __ mov(edx, Immediate(entry));
  }
  __ mov(edx, Operand(edx, 0));  // deref address
  __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
  __ call(edx);

  // Unlink this frame from the handler chain.
  __ PopStackHandler();

  __ bind(&exit);
  // Check if the current stack frame is marked as the outermost JS frame.
  __ pop(ebx);
  __ cmp(ebx, Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
  __ j(not_equal, &not_outermost_js_2);
  // Leaving the outermost frame: clear js_entry_sp.
  __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
  __ bind(&not_outermost_js_2);

  // Restore the top frame descriptor from the stack.
  __ pop(Operand::StaticVariable(ExternalReference(
      Isolate::kCEntryFPAddress, isolate())));

  // Restore callee-saved registers (C calling conventions).
  __ pop(ebx);
  __ pop(esi);
  __ pop(edi);
  __ add(esp, Immediate(2 * kPointerSize));  // remove markers

  // Restore frame pointer and return.
  __ pop(ebp);
  __ ret(0);
}
2012
2013
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002014// -------------------------------------------------------------------------
2015// StringCharCodeAtGenerator
2016
// Emits the fast path for String.prototype.charCodeAt-style access: checks
// the receiver (if unknown) and the smi index, bounds-checks, loads the
// character via StringCharLoadGenerator and leaves a smi-tagged char code in
// result_. All slow cases branch to the labels wired up by the caller and
// handled in GenerateSlow.
void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
  // If the receiver is a smi trigger the non-string case.
  STATIC_ASSERT(kSmiTag == 0);
  if (check_mode_ == RECEIVER_IS_UNKNOWN) {
    __ JumpIfSmi(object_, receiver_not_string_);

    // Fetch the instance type of the receiver into result register.
    __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
    __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
    // If the receiver is not a string trigger the non-string case.
    __ test(result_, Immediate(kIsNotStringMask));
    __ j(not_zero, receiver_not_string_);
  }

  // If the index is non-smi trigger the non-smi case.
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfNotSmi(index_, &index_not_smi_);
  // Re-entry point after GenerateSlow has converted the index to a smi.
  __ bind(&got_smi_index_);

  // Check for index out of range (unsigned compare also rejects negatives).
  __ cmp(index_, FieldOperand(object_, String::kLengthOffset));
  __ j(above_equal, index_out_of_range_);

  __ SmiUntag(index_);

  Factory* factory = masm->isolate()->factory();
  StringCharLoadGenerator::Generate(
      masm, factory, object_, index_, result_, &call_runtime_);

  // Tag the loaded char code as a smi before falling through to the exit.
  __ SmiTag(result_);
  __ bind(&exit_);
}
2049
2050
// Emits the slow paths for GenerateFast: converting a non-smi index via the
// runtime and retrying the fast path, or delegating the whole char-code load
// to the runtime when the string is too complex (e.g. needs flattening).
// Control never falls into this code from above; the leading Abort documents
// that.
void StringCharCodeAtGenerator::GenerateSlow(
    MacroAssembler* masm, EmbedMode embed_mode,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);

  // Index is not a smi.
  __ bind(&index_not_smi_);
  // If index is a heap number, try converting it to an integer.
  __ CheckMap(index_,
              masm->isolate()->factory()->heap_number_map(),
              index_not_number_,
              DONT_DO_SMI_CHECK);
  call_helper.BeforeCall(masm);
  if (embed_mode == PART_OF_IC_HANDLER) {
    // Preserve the IC's vector and slot registers across the runtime call.
    __ push(LoadWithVectorDescriptor::VectorRegister());
    __ push(LoadDescriptor::SlotRegister());
  }
  __ push(object_);
  __ push(index_);  // Consumed by runtime conversion function.
  __ CallRuntime(Runtime::kNumberToSmi);
  if (!index_.is(eax)) {
    // Save the conversion result before the pop instructions below
    // have a chance to overwrite it.
    __ mov(index_, eax);
  }
  __ pop(object_);
  if (embed_mode == PART_OF_IC_HANDLER) {
    __ pop(LoadDescriptor::SlotRegister());
    __ pop(LoadWithVectorDescriptor::VectorRegister());
  }
  // Reload the instance type.
  __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
  __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
  call_helper.AfterCall(masm);
  // If index is still not a smi, it must be out of range.
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfNotSmi(index_, index_out_of_range_);
  // Otherwise, return to the fast path.
  __ jmp(&got_smi_index_);

  // Call runtime. We get here when the receiver is a string and the
  // index is a number, but the code of getting the actual character
  // is too complex (e.g., when the string needs to be flattened).
  __ bind(&call_runtime_);
  call_helper.BeforeCall(masm);
  __ push(object_);
  __ SmiTag(index_);  // Runtime expects a smi-tagged index.
  __ push(index_);
  __ CallRuntime(Runtime::kStringCharCodeAtRT);
  if (!result_.is(eax)) {
    __ mov(result_, eax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
}
2108
2109
2110// -------------------------------------------------------------------------
2111// StringCharFromCodeGenerator
2112
// Emits the fast path that maps a smi char code to a cached single-character
// string via the isolate's single_character_string_cache. Codes that are not
// smis, exceed kMaxOneByteCharCodeU, or miss the cache (undefined entry)
// branch to slow_case_, handled in GenerateSlow.
void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
  // Fast case of Heap::LookupSingleCharacterStringFromCode.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiShiftSize == 0);
  DCHECK(base::bits::IsPowerOfTwo32(String::kMaxOneByteCharCodeU + 1));
  // Single test covers both "is a smi" and "fits in one-byte char code".
  __ test(code_, Immediate(kSmiTagMask |
                           ((~String::kMaxOneByteCharCodeU) << kSmiTagSize)));
  __ j(not_zero, &slow_case_);

  Factory* factory = masm->isolate()->factory();
  __ Move(result_, Immediate(factory->single_character_string_cache()));
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiShiftSize == 0);
  // At this point code register contains smi tagged one byte char code.
  // The smi tag (factor 2) combines with times_half_pointer_size to index
  // the cache array by untagged char code.
  __ mov(result_, FieldOperand(result_,
                               code_, times_half_pointer_size,
                               FixedArray::kHeaderSize));
  __ cmp(result_, factory->undefined_value());
  __ j(equal, &slow_case_);
  __ bind(&exit_);
}
2135
2136
// Emits the slow path for GenerateFast: delegates char-code-to-string
// conversion to Runtime::kStringCharFromCode and jumps back to the shared
// exit. Control never falls into this code from above; the leading Abort
// documents that.
void StringCharFromCodeGenerator::GenerateSlow(
    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);

  __ bind(&slow_case_);
  call_helper.BeforeCall(masm);
  __ push(code_);
  __ CallRuntime(Runtime::kStringCharFromCode);
  // Runtime result arrives in eax; copy it if result_ is a different register.
  if (!result_.is(eax)) {
    __ mov(result_, eax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
}
2154
2155
// Emits a byte-wise copy loop moving `count` characters from src to dest.
// `count` is the character count on entry and is doubled for two-byte
// strings; src, dest and count are clobbered. scratch must be distinct from
// the other three registers (enforced by the DCHECKs) and must be
// byte-addressable for mov_b — presumably the caller passes eax/ebx/ecx/edx.
void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
                                          Register dest,
                                          Register src,
                                          Register count,
                                          Register scratch,
                                          String::Encoding encoding) {
  DCHECK(!scratch.is(dest));
  DCHECK(!scratch.is(src));
  DCHECK(!scratch.is(count));

  // Nothing to do for zero characters.
  Label done;
  __ test(count, count);
  __ j(zero, &done);

  // Make count the number of bytes to copy.
  if (encoding == String::TWO_BYTE_ENCODING) {
    __ shl(count, 1);
  }

  // Simple one-byte-at-a-time copy loop.
  Label loop;
  __ bind(&loop);
  __ mov_b(scratch, Operand(src, 0));
  __ mov_b(Operand(dest, 0), scratch);
  __ inc(src);
  __ inc(dest);
  __ dec(count);
  __ j(not_zero, &loop);

  __ bind(&done);
}
2187
2188
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002189void SubStringStub::Generate(MacroAssembler* masm) {
2190 Label runtime;
2191
2192 // Stack frame on entry.
2193 // esp[0]: return address
2194 // esp[4]: to
2195 // esp[8]: from
2196 // esp[12]: string
2197
2198 // Make sure first argument is a string.
2199 __ mov(eax, Operand(esp, 3 * kPointerSize));
2200 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002201 __ JumpIfSmi(eax, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002202 Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
2203 __ j(NegateCondition(is_string), &runtime);
2204
2205 // eax: string
2206 // ebx: instance type
2207
2208 // Calculate length of sub string using the smi values.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002209 __ mov(ecx, Operand(esp, 1 * kPointerSize)); // To index.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002210 __ JumpIfNotSmi(ecx, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002211 __ mov(edx, Operand(esp, 2 * kPointerSize)); // From index.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002212 __ JumpIfNotSmi(edx, &runtime);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002213 __ sub(ecx, edx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002214 __ cmp(ecx, FieldOperand(eax, String::kLengthOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002215 Label not_original_string;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002216 // Shorter than original string's length: an actual substring.
2217 __ j(below, &not_original_string, Label::kNear);
2218 // Longer than original string's length or negative: unsafe arguments.
2219 __ j(above, &runtime);
2220 // Return original string.
2221 Counters* counters = isolate()->counters();
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002222 __ IncrementCounter(counters->sub_string_native(), 1);
2223 __ ret(3 * kPointerSize);
2224 __ bind(&not_original_string);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002225
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002226 Label single_char;
2227 __ cmp(ecx, Immediate(Smi::FromInt(1)));
2228 __ j(equal, &single_char);
2229
Ben Murdochc7cc0282012-03-05 14:35:55 +00002230 // eax: string
2231 // ebx: instance type
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002232 // ecx: sub string length (smi)
Ben Murdochc7cc0282012-03-05 14:35:55 +00002233 // edx: from index (smi)
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002234 // Deal with different string types: update the index if necessary
2235 // and put the underlying string into edi.
2236 Label underlying_unpacked, sliced_string, seq_or_external_string;
2237 // If the string is not indirect, it can only be sequential or external.
2238 STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
2239 STATIC_ASSERT(kIsIndirectStringMask != 0);
2240 __ test(ebx, Immediate(kIsIndirectStringMask));
2241 __ j(zero, &seq_or_external_string, Label::kNear);
Ben Murdochc7cc0282012-03-05 14:35:55 +00002242
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002243 Factory* factory = isolate()->factory();
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002244 __ test(ebx, Immediate(kSlicedNotConsMask));
2245 __ j(not_zero, &sliced_string, Label::kNear);
2246 // Cons string. Check whether it is flat, then fetch first part.
2247 // Flat cons strings have an empty second part.
2248 __ cmp(FieldOperand(eax, ConsString::kSecondOffset),
2249 factory->empty_string());
2250 __ j(not_equal, &runtime);
2251 __ mov(edi, FieldOperand(eax, ConsString::kFirstOffset));
2252 // Update instance type.
2253 __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002254 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002255 __ jmp(&underlying_unpacked, Label::kNear);
2256
2257 __ bind(&sliced_string);
2258 // Sliced string. Fetch parent and adjust start index by offset.
2259 __ add(edx, FieldOperand(eax, SlicedString::kOffsetOffset));
2260 __ mov(edi, FieldOperand(eax, SlicedString::kParentOffset));
2261 // Update instance type.
2262 __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
2263 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
2264 __ jmp(&underlying_unpacked, Label::kNear);
2265
2266 __ bind(&seq_or_external_string);
2267 // Sequential or external string. Just move string to the expected register.
2268 __ mov(edi, eax);
2269
2270 __ bind(&underlying_unpacked);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002271
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002272 if (FLAG_string_slices) {
2273 Label copy_routine;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002274 // edi: underlying subject string
2275 // ebx: instance type of underlying subject string
2276 // edx: adjusted start index (smi)
2277 // ecx: length (smi)
2278 __ cmp(ecx, Immediate(Smi::FromInt(SlicedString::kMinLength)));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002279 // Short slice. Copy instead of slicing.
2280 __ j(less, &copy_routine);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002281 // Allocate new sliced string. At this point we do not reload the instance
2282 // type including the string encoding because we simply rely on the info
2283 // provided by the original string. It does not matter if the original
2284 // string's encoding is wrong because we always have to recheck encoding of
2285 // the newly created string's parent anyways due to externalized strings.
2286 Label two_byte_slice, set_slice_header;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002287 STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
Ben Murdoch589d6972011-11-30 16:04:58 +00002288 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
2289 __ test(ebx, Immediate(kStringEncodingMask));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002290 __ j(zero, &two_byte_slice, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002291 __ AllocateOneByteSlicedString(eax, ebx, no_reg, &runtime);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002292 __ jmp(&set_slice_header, Label::kNear);
2293 __ bind(&two_byte_slice);
Ben Murdoch589d6972011-11-30 16:04:58 +00002294 __ AllocateTwoByteSlicedString(eax, ebx, no_reg, &runtime);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002295 __ bind(&set_slice_header);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002296 __ mov(FieldOperand(eax, SlicedString::kLengthOffset), ecx);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002297 __ mov(FieldOperand(eax, SlicedString::kHashFieldOffset),
2298 Immediate(String::kEmptyHashField));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002299 __ mov(FieldOperand(eax, SlicedString::kParentOffset), edi);
2300 __ mov(FieldOperand(eax, SlicedString::kOffsetOffset), edx);
2301 __ IncrementCounter(counters->sub_string_native(), 1);
2302 __ ret(3 * kPointerSize);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002303
2304 __ bind(&copy_routine);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002305 }
2306
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002307 // edi: underlying subject string
2308 // ebx: instance type of underlying subject string
2309 // edx: adjusted start index (smi)
2310 // ecx: length (smi)
2311 // The subject string can only be external or sequential string of either
2312 // encoding at this point.
2313 Label two_byte_sequential, runtime_drop_two, sequential_string;
2314 STATIC_ASSERT(kExternalStringTag != 0);
2315 STATIC_ASSERT(kSeqStringTag == 0);
Ben Murdochda12d292016-06-02 14:46:10 +01002316 __ test_b(ebx, Immediate(kExternalStringTag));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002317 __ j(zero, &sequential_string);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002318
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002319 // Handle external string.
2320 // Rule out short external strings.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002321 STATIC_ASSERT(kShortExternalStringTag != 0);
Ben Murdochda12d292016-06-02 14:46:10 +01002322 __ test_b(ebx, Immediate(kShortExternalStringMask));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002323 __ j(not_zero, &runtime);
2324 __ mov(edi, FieldOperand(edi, ExternalString::kResourceDataOffset));
2325 // Move the pointer so that offset-wise, it looks like a sequential string.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002326 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002327 __ sub(edi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
2328
2329 __ bind(&sequential_string);
2330 // Stash away (adjusted) index and (underlying) string.
2331 __ push(edx);
2332 __ push(edi);
2333 __ SmiUntag(ecx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002334 STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
Ben Murdochda12d292016-06-02 14:46:10 +01002335 __ test_b(ebx, Immediate(kStringEncodingMask));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002336 __ j(zero, &two_byte_sequential);
2337
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002338 // Sequential one byte string. Allocate the result.
2339 __ AllocateOneByteString(eax, ecx, ebx, edx, edi, &runtime_drop_two);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002340
2341 // eax: result string
2342 // ecx: result string length
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002343 // Locate first character of result.
2344 __ mov(edi, eax);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002345 __ add(edi, Immediate(SeqOneByteString::kHeaderSize - kHeapObjectTag));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002346 // Load string argument and locate character of sub string start.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002347 __ pop(edx);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002348 __ pop(ebx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002349 __ SmiUntag(ebx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002350 __ lea(edx, FieldOperand(edx, ebx, times_1, SeqOneByteString::kHeaderSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002351
2352 // eax: result string
2353 // ecx: result length
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002354 // edi: first character of result
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002355 // edx: character of sub string start
2356 StringHelper::GenerateCopyCharacters(
2357 masm, edi, edx, ecx, ebx, String::ONE_BYTE_ENCODING);
Steve Block44f0eee2011-05-26 01:26:41 +01002358 __ IncrementCounter(counters->sub_string_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002359 __ ret(3 * kPointerSize);
2360
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002361 __ bind(&two_byte_sequential);
2362 // Sequential two-byte string. Allocate the result.
2363 __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime_drop_two);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002364
2365 // eax: result string
2366 // ecx: result string length
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002367 // Locate first character of result.
2368 __ mov(edi, eax);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002369 __ add(edi,
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002370 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
2371 // Load string argument and locate character of sub string start.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002372 __ pop(edx);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002373 __ pop(ebx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002374 // As from is a smi it is 2 times the value which matches the size of a two
2375 // byte character.
2376 STATIC_ASSERT(kSmiTag == 0);
2377 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002378 __ lea(edx, FieldOperand(edx, ebx, times_1, SeqTwoByteString::kHeaderSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002379
2380 // eax: result string
2381 // ecx: result length
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002382 // edi: first character of result
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002383 // edx: character of sub string start
2384 StringHelper::GenerateCopyCharacters(
2385 masm, edi, edx, ecx, ebx, String::TWO_BYTE_ENCODING);
Steve Block44f0eee2011-05-26 01:26:41 +01002386 __ IncrementCounter(counters->sub_string_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002387 __ ret(3 * kPointerSize);
2388
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002389 // Drop pushed values on the stack before tail call.
2390 __ bind(&runtime_drop_two);
2391 __ Drop(2);
2392
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002393 // Just jump to runtime to create the sub string.
2394 __ bind(&runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002395 __ TailCallRuntime(Runtime::kSubString);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002396
2397 __ bind(&single_char);
2398 // eax: string
2399 // ebx: instance type
2400 // ecx: sub string length (smi)
2401 // edx: from index (smi)
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002402 StringCharAtGenerator generator(eax, edx, ecx, eax, &runtime, &runtime,
Ben Murdoch61f157c2016-09-16 13:49:30 +01002403 &runtime, RECEIVER_IS_STRING);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002404 generator.GenerateFast(masm);
2405 __ ret(3 * kPointerSize);
2406 generator.SkipSlow(masm, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002407}
2408
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002409void ToStringStub::Generate(MacroAssembler* masm) {
2410 // The ToString stub takes one argument in eax.
2411 Label is_number;
2412 __ JumpIfSmi(eax, &is_number, Label::kNear);
2413
2414 Label not_string;
2415 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edi);
2416 // eax: receiver
2417 // edi: receiver map
2418 __ j(above_equal, &not_string, Label::kNear);
2419 __ Ret();
2420 __ bind(&not_string);
2421
2422 Label not_heap_number;
2423 __ CompareMap(eax, masm->isolate()->factory()->heap_number_map());
2424 __ j(not_equal, &not_heap_number, Label::kNear);
2425 __ bind(&is_number);
2426 NumberToStringStub stub(isolate());
2427 __ TailCallStub(&stub);
2428 __ bind(&not_heap_number);
2429
2430 Label not_oddball;
2431 __ CmpInstanceType(edi, ODDBALL_TYPE);
2432 __ j(not_equal, &not_oddball, Label::kNear);
2433 __ mov(eax, FieldOperand(eax, Oddball::kToStringOffset));
2434 __ Ret();
2435 __ bind(&not_oddball);
2436
2437 __ pop(ecx); // Pop return address.
2438 __ push(eax); // Push argument.
2439 __ push(ecx); // Push return address.
2440 __ TailCallRuntime(Runtime::kToString);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002441}
2442
2443
Ben Murdoch097c5b22016-05-18 11:27:45 +01002444void ToNameStub::Generate(MacroAssembler* masm) {
2445 // The ToName stub takes one argument in eax.
2446 Label is_number;
2447 __ JumpIfSmi(eax, &is_number, Label::kNear);
2448
2449 Label not_name;
2450 STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
2451 __ CmpObjectType(eax, LAST_NAME_TYPE, edi);
2452 // eax: receiver
2453 // edi: receiver map
2454 __ j(above, &not_name, Label::kNear);
2455 __ Ret();
2456 __ bind(&not_name);
2457
2458 Label not_heap_number;
2459 __ CompareMap(eax, masm->isolate()->factory()->heap_number_map());
2460 __ j(not_equal, &not_heap_number, Label::kNear);
2461 __ bind(&is_number);
2462 NumberToStringStub stub(isolate());
2463 __ TailCallStub(&stub);
2464 __ bind(&not_heap_number);
2465
2466 Label not_oddball;
2467 __ CmpInstanceType(edi, ODDBALL_TYPE);
2468 __ j(not_equal, &not_oddball, Label::kNear);
2469 __ mov(eax, FieldOperand(eax, Oddball::kToStringOffset));
2470 __ Ret();
2471 __ bind(&not_oddball);
2472
2473 __ pop(ecx); // Pop return address.
2474 __ push(eax); // Push argument.
2475 __ push(ecx); // Push return address.
2476 __ TailCallRuntime(Runtime::kToName);
2477}
2478
2479
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002480void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
2481 Register left,
2482 Register right,
2483 Register scratch1,
2484 Register scratch2) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002485 Register length = scratch1;
2486
2487 // Compare lengths.
2488 Label strings_not_equal, check_zero_length;
2489 __ mov(length, FieldOperand(left, String::kLengthOffset));
2490 __ cmp(length, FieldOperand(right, String::kLengthOffset));
2491 __ j(equal, &check_zero_length, Label::kNear);
2492 __ bind(&strings_not_equal);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002493 __ Move(eax, Immediate(Smi::FromInt(NOT_EQUAL)));
Ben Murdoch257744e2011-11-30 15:57:28 +00002494 __ ret(0);
2495
2496 // Check if the length is zero.
2497 Label compare_chars;
2498 __ bind(&check_zero_length);
2499 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002500 __ test(length, length);
Ben Murdoch257744e2011-11-30 15:57:28 +00002501 __ j(not_zero, &compare_chars, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002502 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
Ben Murdoch257744e2011-11-30 15:57:28 +00002503 __ ret(0);
2504
2505 // Compare characters.
2506 __ bind(&compare_chars);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002507 GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2,
2508 &strings_not_equal, Label::kNear);
Ben Murdoch257744e2011-11-30 15:57:28 +00002509
2510 // Characters are equal.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002511 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
Ben Murdoch257744e2011-11-30 15:57:28 +00002512 __ ret(0);
2513}
2514
2515
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002516void StringHelper::GenerateCompareFlatOneByteStrings(
2517 MacroAssembler* masm, Register left, Register right, Register scratch1,
2518 Register scratch2, Register scratch3) {
Steve Block44f0eee2011-05-26 01:26:41 +01002519 Counters* counters = masm->isolate()->counters();
2520 __ IncrementCounter(counters->string_compare_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002521
2522 // Find minimum length.
Ben Murdoch257744e2011-11-30 15:57:28 +00002523 Label left_shorter;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002524 __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
2525 __ mov(scratch3, scratch1);
2526 __ sub(scratch3, FieldOperand(right, String::kLengthOffset));
2527
2528 Register length_delta = scratch3;
2529
Ben Murdoch257744e2011-11-30 15:57:28 +00002530 __ j(less_equal, &left_shorter, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002531 // Right string is shorter. Change scratch1 to be length of right string.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002532 __ sub(scratch1, length_delta);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002533 __ bind(&left_shorter);
2534
2535 Register min_length = scratch1;
2536
2537 // If either length is zero, just compare lengths.
Ben Murdoch257744e2011-11-30 15:57:28 +00002538 Label compare_lengths;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002539 __ test(min_length, min_length);
Ben Murdoch257744e2011-11-30 15:57:28 +00002540 __ j(zero, &compare_lengths, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002541
Ben Murdoch257744e2011-11-30 15:57:28 +00002542 // Compare characters.
2543 Label result_not_equal;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002544 GenerateOneByteCharsCompareLoop(masm, left, right, min_length, scratch2,
2545 &result_not_equal, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002546
2547 // Compare lengths - strings up to min-length are equal.
2548 __ bind(&compare_lengths);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002549 __ test(length_delta, length_delta);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002550 Label length_not_equal;
2551 __ j(not_zero, &length_not_equal, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002552
2553 // Result is EQUAL.
2554 STATIC_ASSERT(EQUAL == 0);
2555 STATIC_ASSERT(kSmiTag == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002556 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002557 __ ret(0);
2558
Ben Murdoch257744e2011-11-30 15:57:28 +00002559 Label result_greater;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002560 Label result_less;
2561 __ bind(&length_not_equal);
Ben Murdoch257744e2011-11-30 15:57:28 +00002562 __ j(greater, &result_greater, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002563 __ jmp(&result_less, Label::kNear);
2564 __ bind(&result_not_equal);
2565 __ j(above, &result_greater, Label::kNear);
2566 __ bind(&result_less);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002567
2568 // Result is LESS.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002569 __ Move(eax, Immediate(Smi::FromInt(LESS)));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002570 __ ret(0);
2571
2572 // Result is GREATER.
2573 __ bind(&result_greater);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002574 __ Move(eax, Immediate(Smi::FromInt(GREATER)));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002575 __ ret(0);
2576}
2577
2578
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002579void StringHelper::GenerateOneByteCharsCompareLoop(
2580 MacroAssembler* masm, Register left, Register right, Register length,
2581 Register scratch, Label* chars_not_equal,
Ben Murdoch257744e2011-11-30 15:57:28 +00002582 Label::Distance chars_not_equal_near) {
2583 // Change index to run from -length to -1 by adding length to string
2584 // start. This means that loop ends when index reaches zero, which
2585 // doesn't need an additional compare.
2586 __ SmiUntag(length);
2587 __ lea(left,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002588 FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
Ben Murdoch257744e2011-11-30 15:57:28 +00002589 __ lea(right,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002590 FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
Ben Murdoch257744e2011-11-30 15:57:28 +00002591 __ neg(length);
2592 Register index = length; // index = -length;
2593
2594 // Compare loop.
2595 Label loop;
2596 __ bind(&loop);
2597 __ mov_b(scratch, Operand(left, index, times_1, 0));
2598 __ cmpb(scratch, Operand(right, index, times_1, 0));
2599 __ j(not_equal, chars_not_equal, chars_not_equal_near);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002600 __ inc(index);
Ben Murdoch257744e2011-11-30 15:57:28 +00002601 __ j(not_zero, &loop);
2602}
2603
2604
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002605void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
2606 // ----------- S t a t e -------------
2607 // -- edx : left
2608 // -- eax : right
2609 // -- esp[0] : return address
2610 // -----------------------------------
2611
2612 // Load ecx with the allocation site. We stick an undefined dummy value here
2613 // and replace it with the real allocation site later when we instantiate this
2614 // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
Ben Murdoch61f157c2016-09-16 13:49:30 +01002615 __ mov(ecx, isolate()->factory()->undefined_value());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002616
2617 // Make sure that we actually patched the allocation site.
2618 if (FLAG_debug_code) {
2619 __ test(ecx, Immediate(kSmiTagMask));
2620 __ Assert(not_equal, kExpectedAllocationSite);
2621 __ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
2622 isolate()->factory()->allocation_site_map());
2623 __ Assert(equal, kExpectedAllocationSite);
2624 }
2625
2626 // Tail call into the stub that handles binary operations with allocation
2627 // sites.
2628 BinaryOpWithAllocationSiteStub stub(isolate(), state());
2629 __ TailCallStub(&stub);
2630}
2631
2632
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002633void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
2634 DCHECK_EQ(CompareICState::BOOLEAN, state());
2635 Label miss;
2636 Label::Distance const miss_distance =
2637 masm->emit_debug_code() ? Label::kFar : Label::kNear;
2638
2639 __ JumpIfSmi(edx, &miss, miss_distance);
2640 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
2641 __ JumpIfSmi(eax, &miss, miss_distance);
2642 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2643 __ JumpIfNotRoot(ecx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
2644 __ JumpIfNotRoot(ebx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002645 if (!Token::IsEqualityOp(op())) {
2646 __ mov(eax, FieldOperand(eax, Oddball::kToNumberOffset));
2647 __ AssertSmi(eax);
2648 __ mov(edx, FieldOperand(edx, Oddball::kToNumberOffset));
2649 __ AssertSmi(edx);
2650 __ push(eax);
2651 __ mov(eax, edx);
2652 __ pop(edx);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002653 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01002654 __ sub(eax, edx);
2655 __ Ret();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002656
2657 __ bind(&miss);
2658 GenerateMiss(masm);
2659}
2660
2661
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002662void CompareICStub::GenerateSmis(MacroAssembler* masm) {
2663 DCHECK(state() == CompareICState::SMI);
Ben Murdoch257744e2011-11-30 15:57:28 +00002664 Label miss;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002665 __ mov(ecx, edx);
2666 __ or_(ecx, eax);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002667 __ JumpIfNotSmi(ecx, &miss, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002668
2669 if (GetCondition() == equal) {
2670 // For equality we do not care about the sign of the result.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002671 __ sub(eax, edx);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002672 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00002673 Label done;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002674 __ sub(edx, eax);
Ben Murdoch257744e2011-11-30 15:57:28 +00002675 __ j(no_overflow, &done, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002676 // Correct sign of result in case of overflow.
2677 __ not_(edx);
2678 __ bind(&done);
2679 __ mov(eax, edx);
2680 }
2681 __ ret(0);
2682
2683 __ bind(&miss);
2684 GenerateMiss(masm);
2685}
2686
2687
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002688void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
2689 DCHECK(state() == CompareICState::NUMBER);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002690
Ben Murdoch257744e2011-11-30 15:57:28 +00002691 Label generic_stub;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002692 Label unordered, maybe_undefined1, maybe_undefined2;
Ben Murdoch257744e2011-11-30 15:57:28 +00002693 Label miss;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002694
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002695 if (left() == CompareICState::SMI) {
2696 __ JumpIfNotSmi(edx, &miss);
2697 }
2698 if (right() == CompareICState::SMI) {
2699 __ JumpIfNotSmi(eax, &miss);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002700 }
2701
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002702 // Load left and right operand.
2703 Label done, left, left_smi, right_smi;
2704 __ JumpIfSmi(eax, &right_smi, Label::kNear);
2705 __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
2706 isolate()->factory()->heap_number_map());
2707 __ j(not_equal, &maybe_undefined1, Label::kNear);
2708 __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
2709 __ jmp(&left, Label::kNear);
2710 __ bind(&right_smi);
2711 __ mov(ecx, eax); // Can't clobber eax because we can still jump away.
2712 __ SmiUntag(ecx);
2713 __ Cvtsi2sd(xmm1, ecx);
2714
2715 __ bind(&left);
2716 __ JumpIfSmi(edx, &left_smi, Label::kNear);
2717 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
2718 isolate()->factory()->heap_number_map());
2719 __ j(not_equal, &maybe_undefined2, Label::kNear);
2720 __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
2721 __ jmp(&done);
2722 __ bind(&left_smi);
2723 __ mov(ecx, edx); // Can't clobber edx because we can still jump away.
2724 __ SmiUntag(ecx);
2725 __ Cvtsi2sd(xmm0, ecx);
2726
2727 __ bind(&done);
2728 // Compare operands.
2729 __ ucomisd(xmm0, xmm1);
2730
2731 // Don't base result on EFLAGS when a NaN is involved.
2732 __ j(parity_even, &unordered, Label::kNear);
2733
2734 // Return a result of -1, 0, or 1, based on EFLAGS.
2735 // Performing mov, because xor would destroy the flag register.
2736 __ mov(eax, 0); // equal
2737 __ mov(ecx, Immediate(Smi::FromInt(1)));
2738 __ cmov(above, eax, ecx);
2739 __ mov(ecx, Immediate(Smi::FromInt(-1)));
2740 __ cmov(below, eax, ecx);
2741 __ ret(0);
2742
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002743 __ bind(&unordered);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002744 __ bind(&generic_stub);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002745 CompareICStub stub(isolate(), op(), CompareICState::GENERIC,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002746 CompareICState::GENERIC, CompareICState::GENERIC);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002747 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
2748
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002749 __ bind(&maybe_undefined1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002750 if (Token::IsOrderedRelationalCompareOp(op())) {
2751 __ cmp(eax, Immediate(isolate()->factory()->undefined_value()));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002752 __ j(not_equal, &miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002753 __ JumpIfSmi(edx, &unordered);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002754 __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
2755 __ j(not_equal, &maybe_undefined2, Label::kNear);
2756 __ jmp(&unordered);
2757 }
2758
2759 __ bind(&maybe_undefined2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002760 if (Token::IsOrderedRelationalCompareOp(op())) {
2761 __ cmp(edx, Immediate(isolate()->factory()->undefined_value()));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002762 __ j(equal, &unordered);
2763 }
2764
Ben Murdochb0fe1622011-05-05 13:52:32 +01002765 __ bind(&miss);
2766 GenerateMiss(masm);
2767}
2768
2769
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002770void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
2771 DCHECK(state() == CompareICState::INTERNALIZED_STRING);
2772 DCHECK(GetCondition() == equal);
Ben Murdoch257744e2011-11-30 15:57:28 +00002773
2774 // Registers containing left and right operands respectively.
2775 Register left = edx;
2776 Register right = eax;
2777 Register tmp1 = ecx;
2778 Register tmp2 = ebx;
2779
2780 // Check that both operands are heap objects.
2781 Label miss;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002782 __ mov(tmp1, left);
Ben Murdoch257744e2011-11-30 15:57:28 +00002783 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002784 __ and_(tmp1, right);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002785 __ JumpIfSmi(tmp1, &miss, Label::kNear);
Ben Murdoch257744e2011-11-30 15:57:28 +00002786
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002787 // Check that both operands are internalized strings.
Ben Murdoch257744e2011-11-30 15:57:28 +00002788 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
2789 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
2790 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
2791 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002792 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
2793 __ or_(tmp1, tmp2);
2794 __ test(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
2795 __ j(not_zero, &miss, Label::kNear);
Ben Murdoch257744e2011-11-30 15:57:28 +00002796
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002797 // Internalized strings are compared by identity.
Ben Murdoch257744e2011-11-30 15:57:28 +00002798 Label done;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002799 __ cmp(left, right);
Ben Murdoch257744e2011-11-30 15:57:28 +00002800 // Make sure eax is non-zero. At this point input operands are
2801 // guaranteed to be non-zero.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002802 DCHECK(right.is(eax));
Ben Murdoch257744e2011-11-30 15:57:28 +00002803 __ j(not_equal, &done, Label::kNear);
2804 STATIC_ASSERT(EQUAL == 0);
2805 STATIC_ASSERT(kSmiTag == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002806 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
Ben Murdoch257744e2011-11-30 15:57:28 +00002807 __ bind(&done);
2808 __ ret(0);
2809
2810 __ bind(&miss);
2811 GenerateMiss(masm);
2812}
2813
2814
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002815void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
2816 DCHECK(state() == CompareICState::UNIQUE_NAME);
2817 DCHECK(GetCondition() == equal);
2818
2819 // Registers containing left and right operands respectively.
2820 Register left = edx;
2821 Register right = eax;
2822 Register tmp1 = ecx;
2823 Register tmp2 = ebx;
2824
2825 // Check that both operands are heap objects.
2826 Label miss;
2827 __ mov(tmp1, left);
2828 STATIC_ASSERT(kSmiTag == 0);
2829 __ and_(tmp1, right);
2830 __ JumpIfSmi(tmp1, &miss, Label::kNear);
2831
2832 // Check that both operands are unique names. This leaves the instance
2833 // types loaded in tmp1 and tmp2.
2834 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
2835 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
2836 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
2837 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
2838
2839 __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear);
2840 __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear);
2841
2842 // Unique names are compared by identity.
2843 Label done;
2844 __ cmp(left, right);
2845 // Make sure eax is non-zero. At this point input operands are
2846 // guaranteed to be non-zero.
2847 DCHECK(right.is(eax));
2848 __ j(not_equal, &done, Label::kNear);
2849 STATIC_ASSERT(EQUAL == 0);
2850 STATIC_ASSERT(kSmiTag == 0);
2851 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
2852 __ bind(&done);
2853 __ ret(0);
2854
2855 __ bind(&miss);
2856 GenerateMiss(masm);
2857}
2858
2859
// CompareIC fast path for two string operands (edx = left, eax = right).
// Handles identity, internalized-string equality, and flat one-byte string
// comparison inline; everything else (cons strings, two-byte strings, ...)
// goes to the runtime.  For equality ops the result in eax is zero iff the
// strings are equal; for ordered ops it is a Smi LESS/EQUAL/GREATER.
void CompareICStub::GenerateStrings(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::STRING);
  Label miss;

  bool equality = Token::IsEqualityOp(op());

  // Registers containing left and right operands respectively.
  Register left = edx;
  Register right = eax;
  Register tmp1 = ecx;
  Register tmp2 = ebx;
  Register tmp3 = edi;

  // Check that both operands are heap objects (and-ing keeps the Smi tag
  // bit clear only if at least one operand is a Smi).
  __ mov(tmp1, left);
  STATIC_ASSERT(kSmiTag == 0);
  __ and_(tmp1, right);
  __ JumpIfSmi(tmp1, &miss);

  // Check that both operands are strings. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  __ mov(tmp3, tmp1);
  STATIC_ASSERT(kNotStringTag != 0);
  __ or_(tmp3, tmp2);
  __ test(tmp3, Immediate(kIsNotStringMask));
  __ j(not_zero, &miss);

  // Fast check for identical strings.
  Label not_same;
  __ cmp(left, right);
  __ j(not_equal, &not_same, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);

  // Handle not identical strings.
  __ bind(&not_same);

  // Check that both strings are internalized. If they are, we're done
  // because we already know they are not identical.  But in the case of
  // non-equality compare, we still need to determine the order. We
  // also know they are both strings.
  if (equality) {
    Label do_compare;
    STATIC_ASSERT(kInternalizedTag == 0);
    __ or_(tmp1, tmp2);
    __ test(tmp1, Immediate(kIsNotInternalizedMask));
    __ j(not_zero, &do_compare, Label::kNear);
    // Make sure eax is non-zero. At this point input operands are
    // guaranteed to be non-zero.
    DCHECK(right.is(eax));
    __ ret(0);
    __ bind(&do_compare);
  }

  // Check that both strings are sequential one-byte.
  Label runtime;
  __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime);

  // Compare flat one byte strings. Returns when done.
  if (equality) {
    StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1,
                                                  tmp2);
  } else {
    StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1,
                                                    tmp2, tmp3);
  }

  // Handle more complex cases in runtime.
  __ bind(&runtime);
  if (equality) {
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(left);
      __ Push(right);
      __ CallRuntime(Runtime::kStringEqual);
    }
    // Subtract the true value so eax is zero exactly when the runtime
    // reported the strings equal.
    __ sub(eax, Immediate(masm->isolate()->factory()->true_value()));
    __ Ret();
  } else {
    // Tail-call with the two strings placed under the return address.
    __ pop(tmp1);  // Return address.
    __ push(left);
    __ push(right);
    __ push(tmp1);
    __ TailCallRuntime(Runtime::kStringCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}
2955
2956
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002957void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
2958 DCHECK_EQ(CompareICState::RECEIVER, state());
Ben Murdoch257744e2011-11-30 15:57:28 +00002959 Label miss;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002960 __ mov(ecx, edx);
2961 __ and_(ecx, eax);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002962 __ JumpIfSmi(ecx, &miss, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002963
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002964 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
2965 __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
2966 __ j(below, &miss, Label::kNear);
2967 __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ecx);
2968 __ j(below, &miss, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002969
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002970 DCHECK_EQ(equal, GetCondition());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002971 __ sub(eax, edx);
2972 __ ret(0);
2973
2974 __ bind(&miss);
2975 GenerateMiss(masm);
2976}
2977
2978
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002979void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002980 Label miss;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002981 Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002982 __ mov(ecx, edx);
2983 __ and_(ecx, eax);
2984 __ JumpIfSmi(ecx, &miss, Label::kNear);
2985
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002986 __ GetWeakValue(edi, cell);
2987 __ cmp(edi, FieldOperand(eax, HeapObject::kMapOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002988 __ j(not_equal, &miss, Label::kNear);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002989 __ cmp(edi, FieldOperand(edx, HeapObject::kMapOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002990 __ j(not_equal, &miss, Label::kNear);
2991
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002992 if (Token::IsEqualityOp(op())) {
2993 __ sub(eax, edx);
2994 __ ret(0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002995 } else {
2996 __ PopReturnAddressTo(ecx);
2997 __ Push(edx);
2998 __ Push(eax);
2999 __ Push(Immediate(Smi::FromInt(NegativeComparisonResult(GetCondition()))));
3000 __ PushReturnAddressFrom(ecx);
3001 __ TailCallRuntime(Runtime::kCompare);
3002 }
Ben Murdochc7cc0282012-03-05 14:35:55 +00003003
3004 __ bind(&miss);
3005 GenerateMiss(masm);
3006}
3007
3008
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003009void CompareICStub::GenerateMiss(MacroAssembler* masm) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003010 {
3011 // Call the runtime system in a fresh internal frame.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003012 FrameScope scope(masm, StackFrame::INTERNAL);
3013 __ push(edx); // Preserve edx and eax.
3014 __ push(eax);
3015 __ push(edx); // And also use them as the arguments.
3016 __ push(eax);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003017 __ push(Immediate(Smi::FromInt(op())));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003018 __ CallRuntime(Runtime::kCompareIC_Miss);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003019 // Compute the entry point of the rewritten stub.
3020 __ lea(edi, FieldOperand(eax, Code::kHeaderSize));
3021 __ pop(eax);
3022 __ pop(edx);
3023 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003024
Ben Murdochb0fe1622011-05-05 13:52:32 +01003025 // Do a tail call to the rewritten stub.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003026 __ jmp(edi);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003027}
3028
3029
// Helper function used to check that the dictionary doesn't contain
// the property. This function may return false negatives, so miss_label
// must always call a backup property check that is complete.
// This function is safe to call if the receiver has fast properties.
// Name must be a unique name and receiver must be a heap object.
// |properties| holds the property dictionary; |r0| is clobbered as scratch.
void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register properties,
                                                      Handle<Name> name,
                                                      Register r0) {
  DCHECK(name->IsUniqueName());

  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the hole value).
  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    Register index = r0;
    // Capacity is smi 2^n.
    __ mov(index, FieldOperand(properties, kCapacityOffset));
    __ dec(index);
    // The hash and probe offset are compile-time constants, so the whole
    // probe offset can be folded into one immediate-and.
    __ and_(index,
            Immediate(Smi::FromInt(name->Hash() +
                                   NameDictionary::GetProbeOffset(i))));

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ lea(index, Operand(index, index, times_2, 0));  // index *= 3.
    Register entity_name = r0;
    // Having undefined at this place means the name is not contained.
    STATIC_ASSERT(kSmiTagSize == 1);
    // index is a smi, so times_half_pointer_size yields a pointer-size scale.
    __ mov(entity_name, Operand(properties, index, times_half_pointer_size,
                                kElementsStartOffset - kHeapObjectTag));
    __ cmp(entity_name, masm->isolate()->factory()->undefined_value());
    __ j(equal, done);

    // Stop if found the property.
    __ cmp(entity_name, Handle<Name>(name));
    __ j(equal, miss);

    Label good;
    // Check for the hole and skip.
    __ cmp(entity_name, masm->isolate()->factory()->the_hole_value());
    __ j(equal, &good, Label::kNear);

    // Check if the entry name is not a unique name. Non-unique names may
    // compare equal to |name| without being pointer-identical, so bail out
    // conservatively to the complete check.
    __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
    __ JumpIfNotUniqueNameInstanceType(
        FieldOperand(entity_name, Map::kInstanceTypeOffset), miss);
    __ bind(&good);
  }

  // The inlined probes were inconclusive; fall back to the full probing
  // stub. It takes (name, hash) on the stack and returns its result in r0:
  // non-zero means the name was found, i.e. the negative lookup fails.
  NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
                                NEGATIVE_LOOKUP);
  __ push(Immediate(Handle<Object>(name)));
  __ push(Immediate(name->Hash()));
  __ CallStub(&stub);
  __ test(r0, r0);
  __ j(not_zero, miss);
  __ jmp(done);
}
3094
3095
// Probe the name dictionary in the |elements| register. Jump to the
// |done| label if a property with the given name is found leaving the
// index into the dictionary in |r0|. Jump to the |miss| label
// otherwise. |r0| and |r1| are clobbered as scratch registers.
void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register elements,
                                                      Register name,
                                                      Register r0,
                                                      Register r1) {
  DCHECK(!elements.is(r0));
  DCHECK(!elements.is(r1));
  DCHECK(!name.is(r0));
  DCHECK(!name.is(r1));

  __ AssertName(name);

  // r1 = capacity - 1, the mask used to wrap probe indices.
  __ mov(r1, FieldOperand(elements, kCapacityOffset));
  __ shr(r1, kSmiTagSize);  // convert smi to int
  __ dec(r1);

  // Generate an unrolled loop that performs a few probes before
  // giving up. Measurements done on Gmail indicate that 2 probes
  // cover ~93% of loads from dictionaries.
  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
    __ shr(r0, Name::kHashShift);
    if (i > 0) {
      __ add(r0, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ and_(r0, r1);

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ lea(r0, Operand(r0, r0, times_2, 0));  // r0 = r0 * 3

    // Check if the key is identical to the name.
    __ cmp(name, Operand(elements,
                         r0,
                         times_4,
                         kElementsStartOffset - kHeapObjectTag));
    __ j(equal, done);
  }

  // Inlined probes did not find the name; run the full probing stub with
  // (name, hash) pushed as arguments. The stub leaves the entry index in
  // r0 and its result in r1: zero means the name was not found.
  NameDictionaryLookupStub stub(masm->isolate(), elements, r1, r0,
                                POSITIVE_LOOKUP);
  __ push(name);
  __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
  __ shr(r0, Name::kHashShift);
  __ push(r0);
  __ CallStub(&stub);

  __ test(r1, r1);
  __ j(zero, miss);
  __ jmp(done);
}
3154
3155
// Full (non-inlined) dictionary probe used as the backup for the partial
// inlined probes above. Probes slots kInlinedProbes..kTotalProbes-1.
void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
  // This stub overrides SometimesSetsUpAFrame() to return false. That means
  // we cannot call anything that could cause a GC from this stub.
  // Stack frame on entry:
  //  esp[0 * kPointerSize]: return address.
  //  esp[1 * kPointerSize]: key's hash.
  //  esp[2 * kPointerSize]: key.
  // Registers:
  //  dictionary_: NameDictionary to probe.
  //  result_: used as scratch.
  //  index_: will hold an index of entry if lookup is successful.
  //          might alias with result_.
  // Returns:
  //  result_ is zero if lookup failed, non zero otherwise.

  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;

  Register scratch = result();

  // Push the untagged mask (capacity - 1) so it can be re-read each probe;
  // note this shifts all the entry stack offsets up by one slot below.
  __ mov(scratch, FieldOperand(dictionary(), kCapacityOffset));
  __ dec(scratch);
  __ SmiUntag(scratch);
  __ push(scratch);

  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the null value).
  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ mov(scratch, Operand(esp, 2 * kPointerSize));  // Key's hash.
    if (i > 0) {
      __ add(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ and_(scratch, Operand(esp, 0));  // Mask pushed above.

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ lea(index(), Operand(scratch, scratch, times_2, 0));  // index *= 3.

    // Having undefined at this place means the name is not contained.
    STATIC_ASSERT(kSmiTagSize == 1);
    __ mov(scratch, Operand(dictionary(), index(), times_pointer_size,
                            kElementsStartOffset - kHeapObjectTag));
    __ cmp(scratch, isolate()->factory()->undefined_value());
    __ j(equal, &not_in_dictionary);

    // Stop if found the property.
    __ cmp(scratch, Operand(esp, 3 * kPointerSize));  // The key itself.
    __ j(equal, &in_dictionary);

    if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
      // If we hit a key that is not a unique name during negative
      // lookup we have to bailout as this key might be equal to the
      // key we are looking for.

      // Check if the entry name is not a unique name.
      __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
      __ JumpIfNotUniqueNameInstanceType(
          FieldOperand(scratch, Map::kInstanceTypeOffset),
          &maybe_in_dictionary);
    }
  }

  __ bind(&maybe_in_dictionary);
  // If we are doing negative lookup then probing failure should be
  // treated as a lookup success. For positive lookup probing failure
  // should be treated as lookup failure.
  if (mode() == POSITIVE_LOOKUP) {
    __ mov(result(), Immediate(0));
    __ Drop(1);  // Drop the pushed mask.
    __ ret(2 * kPointerSize);  // Pop hash and key.
  }
  // For NEGATIVE_LOOKUP, fall through and report "in dictionary".

  __ bind(&in_dictionary);
  __ mov(result(), Immediate(1));
  __ Drop(1);  // Drop the pushed mask.
  __ ret(2 * kPointerSize);  // Pop hash and key.

  __ bind(&not_in_dictionary);
  __ mov(result(), Immediate(0));
  __ Drop(1);  // Drop the pushed mask.
  __ ret(2 * kPointerSize);  // Pop hash and key.
}
3241
3242
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003243void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
3244 Isolate* isolate) {
3245 StoreBufferOverflowStub stub(isolate, kDontSaveFPRegs);
3246 stub.GetCode();
3247 StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
3248 stub2.GetCode();
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003249}
3250
3251
// Takes the input in 3 registers: address_ value_ and object_. A pointer to
// the value has just been written into the object, now this stub makes sure
// we keep the GC informed. The word in the object where the value has been
// written is in the address register.
void RecordWriteStub::Generate(MacroAssembler* masm) {
  Label skip_to_incremental_noncompacting;
  Label skip_to_incremental_compacting;

  // The first two instructions are generated with labels so as to get the
  // offset fixed up correctly by the bind(Label*) call. We patch it back and
  // forth between a compare instructions (a nop in this position) and the
  // real branch when we start and stop incremental heap marking.
  __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
  __ jmp(&skip_to_incremental_compacting, Label::kFar);

  // STORE_BUFFER_ONLY fast path: just record the slot (or return).
  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&skip_to_incremental_noncompacting);
  GenerateIncremental(masm, INCREMENTAL);

  __ bind(&skip_to_incremental_compacting);
  GenerateIncremental(masm, INCREMENTAL_COMPACTION);

  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
  // Will be checked in IncrementalMarking::ActivateGeneratedStub.
  // Overwrite the two jumps emitted above with nops of matching sizes so
  // the stub starts out in the STORE_BUFFER_ONLY configuration.
  masm->set_byte_at(0, kTwoByteNopInstruction);
  masm->set_byte_at(2, kFiveByteNopInstruction);
}
3285
3286
// Slow path of the record-write stub, used while incremental marking is
// active. Notifies the incremental marker about the written value and, if
// requested, updates the remembered set.
void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
  regs_.Save(masm);

  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    Label dont_need_remembered_set;

    // Load the written value from the slot.
    __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
    __ JumpIfNotInNewSpace(regs_.scratch0(),  // Value.
                           regs_.scratch0(),
                           &dont_need_remembered_set);

    // New-space objects referenced from new space need no remembered-set
    // entry either.
    __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(),
                        &dont_need_remembered_set);

    // First notify the incremental marker if necessary, then update the
    // remembered set.
    CheckNeedsToInformIncrementalMarker(
        masm,
        kUpdateRememberedSetOnNoNeedToInformIncrementalMarker,
        mode);
    InformIncrementalMarker(masm);
    regs_.Restore(masm);
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);

    __ bind(&dont_need_remembered_set);
  }

  CheckNeedsToInformIncrementalMarker(
      masm,
      kReturnOnNoNeedToInformIncrementalMarker,
      mode);
  InformIncrementalMarker(masm);
  regs_.Restore(masm);
  __ ret(0);
}
3323
3324
// Calls the C function incremental_marking_record_write(object, slot,
// isolate), preserving caller-saved registers around the call.
void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
  int argument_count = 3;
  __ PrepareCallCFunction(argument_count, regs_.scratch0());
  __ mov(Operand(esp, 0 * kPointerSize), regs_.object());
  __ mov(Operand(esp, 1 * kPointerSize), regs_.address());  // Slot.
  __ mov(Operand(esp, 2 * kPointerSize),
         Immediate(ExternalReference::isolate_address(isolate())));

  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallCFunction(
      ExternalReference::incremental_marking_record_write_function(isolate()),
      argument_count);

  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
}
3341
3342
// Decides whether the incremental marker must be told about this write.
// Falls through when the marker must be informed; otherwise either updates
// the remembered set or returns, depending on |on_no_need|.
void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    MacroAssembler* masm,
    OnNoNeedToInformIncrementalMarker on_no_need,
    Mode mode) {
  Label object_is_black, need_incremental, need_incremental_pop_object;

  // Mask the object address down to its page header and decrement the
  // page's write-barrier counter; when the counter goes negative, force a
  // call into the incremental marker.
  __ mov(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
  __ and_(regs_.scratch0(), regs_.object());
  __ mov(regs_.scratch1(),
         Operand(regs_.scratch0(),
                 MemoryChunk::kWriteBarrierCounterOffset));
  __ sub(regs_.scratch1(), Immediate(1));
  __ mov(Operand(regs_.scratch0(),
                 MemoryChunk::kWriteBarrierCounterOffset),
         regs_.scratch1());
  __ j(negative, &need_incremental);

  // Let's look at the color of the object:  If it is not black we don't have
  // to inform the incremental marker.
  __ JumpIfBlack(regs_.object(),
                 regs_.scratch0(),
                 regs_.scratch1(),
                 &object_is_black,
                 Label::kNear);

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&object_is_black);

  // Get the value from the slot.
  __ mov(regs_.scratch0(), Operand(regs_.address(), 0));

  if (mode == INCREMENTAL_COMPACTION) {
    Label ensure_not_white;

    // Values on non-evacuation-candidate pages do not need recording.
    __ CheckPageFlag(regs_.scratch0(),  // Contains value.
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kEvacuationCandidateMask,
                     zero,
                     &ensure_not_white,
                     Label::kNear);

    // Slots in objects on pages that skip slot recording are exempt too.
    __ CheckPageFlag(regs_.object(),
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kSkipEvacuationSlotsRecordingMask,
                     not_zero,
                     &ensure_not_white,
                     Label::kNear);

    __ jmp(&need_incremental);

    __ bind(&ensure_not_white);
  }

  // We need an extra register for this, so we push the object register
  // temporarily.
  __ push(regs_.object());
  __ JumpIfWhite(regs_.scratch0(),  // The value.
                 regs_.scratch1(),  // Scratch.
                 regs_.object(),    // Scratch.
                 &need_incremental_pop_object, Label::kNear);
  __ pop(regs_.object());

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&need_incremental_pop_object);
  __ pop(regs_.object());

  __ bind(&need_incremental);

  // Fall through when we need to inform the incremental marker.
}
3427
3428
// Trampoline used when a stub bails out: calls into the runtime through a
// register-saving CEntry stub, then unwinds the stub-failure frame and
// returns past the stub's arguments to the caller's continuation.
void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(isolate(), 1, kSaveFPRegs);
  __ call(ces.GetCode(), RelocInfo::CODE_TARGET);
  // The frame records how many arguments have to be removed from the stack.
  int parameter_count_offset =
      StubFailureTrampolineFrameConstants::kArgumentsLengthOffset;
  __ mov(ebx, MemOperand(ebp, parameter_count_offset));
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  __ pop(ecx);
  // In JS-function mode one extra (receiver) slot is dropped as well.
  int additional_offset =
      function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0;
  __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset));
  __ jmp(ecx);  // Return to IC Miss stub, continuation still on stack.
}
3442
3443
3444void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003445 __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
Ben Murdoch61f157c2016-09-16 13:49:30 +01003446 LoadICStub stub(isolate());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003447 stub.GenerateForTrampoline(masm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003448}
3449
3450
3451void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003452 __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
Ben Murdoch61f157c2016-09-16 13:49:30 +01003453 KeyedLoadICStub stub(isolate());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003454 stub.GenerateForTrampoline(masm);
3455}
3456
3457
// Walks a feedback array of {map (weak cell), handler} pairs and tail-jumps
// to the handler whose map matches the receiver's map. If |is_polymorphic|
// is false the array may have exactly one pair (a named keyed load); arrays
// longer than that are scanned in the polymorphic loop. Jumps to |miss| if
// no map matches. Smi receivers are matched via the heap-number map.
static void HandleArrayCases(MacroAssembler* masm, Register receiver,
                             Register key, Register vector, Register slot,
                             Register feedback, bool is_polymorphic,
                             Label* miss) {
  // feedback initially contains the feedback array
  Label next, next_loop, prepare_next;
  Label load_smi_map, compare_map;
  Label start_polymorphic;

  // receiver and vector are clobbered below, so preserve them on the stack.
  __ push(receiver);
  __ push(vector);

  Register receiver_map = receiver;
  Register cached_map = vector;

  // Receiver might not be a heap object.
  __ JumpIfSmi(receiver, &load_smi_map);
  __ mov(receiver_map, FieldOperand(receiver, 0));
  __ bind(&compare_map);
  __ mov(cached_map, FieldOperand(feedback, FixedArray::OffsetOfElementAt(0)));

  // A named keyed load might have a 2 element array, all other cases can count
  // on an array with at least 2 {map, handler} pairs, so they can go right
  // into polymorphic array handling.
  __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  __ j(not_equal, is_polymorphic ? &start_polymorphic : &next);

  // found, now call handler.
  Register handler = feedback;
  __ mov(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1)));
  __ pop(vector);
  __ pop(receiver);
  __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
  __ jmp(handler);

  if (!is_polymorphic) {
    // A two-element array holds only one pair; anything longer still goes
    // through the polymorphic loop.
    __ bind(&next);
    __ cmp(FieldOperand(feedback, FixedArray::kLengthOffset),
           Immediate(Smi::FromInt(2)));
    __ j(not_equal, &start_polymorphic);
    __ pop(vector);
    __ pop(receiver);
    __ jmp(miss);
  }

  // Polymorphic, we have to loop from 2 to N
  __ bind(&start_polymorphic);
  __ push(key);
  Register counter = key;
  __ mov(counter, Immediate(Smi::FromInt(2)));
  __ bind(&next_loop);
  // counter is a smi, hence the half-pointer scaling.
  __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
                                  FixedArray::kHeaderSize));
  __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  __ j(not_equal, &prepare_next);
  __ mov(handler, FieldOperand(feedback, counter, times_half_pointer_size,
                               FixedArray::kHeaderSize + kPointerSize));
  __ pop(key);
  __ pop(vector);
  __ pop(receiver);
  __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
  __ jmp(handler);

  __ bind(&prepare_next);
  __ add(counter, Immediate(Smi::FromInt(2)));
  __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
  __ j(less, &next_loop);

  // We exhausted our array of map handler pairs.
  __ pop(key);
  __ pop(vector);
  __ pop(receiver);
  __ jmp(miss);

  __ bind(&load_smi_map);
  // Smis are matched against the heap-number map in the feedback array.
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}
3536
3537
// Monomorphic IC dispatch: if the receiver's map matches the map held in
// |weak_cell|, tail-jumps to the handler stored in the following feedback
// vector slot; otherwise jumps to |miss|. Smi receivers are matched against
// the heap-number map.
static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver,
                                  Register key, Register vector, Register slot,
                                  Register weak_cell, Label* miss) {
  // feedback initially contains the feedback array
  Label compare_smi_map;

  // Move the weak map into the weak_cell register.
  Register ic_map = weak_cell;
  __ mov(ic_map, FieldOperand(weak_cell, WeakCell::kValueOffset));

  // Receiver might not be a heap object.
  __ JumpIfSmi(receiver, &compare_smi_map);
  __ cmp(ic_map, FieldOperand(receiver, 0));
  __ j(not_equal, miss);
  Register handler = weak_cell;
  // The handler lives one pointer past the weak cell in the vector.
  __ mov(handler, FieldOperand(vector, slot, times_half_pointer_size,
                               FixedArray::kHeaderSize + kPointerSize));
  __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
  __ jmp(handler);

  // In microbenchmarks, it made sense to unroll this code so that the call to
  // the handler is duplicated for a HeapObject receiver and a Smi receiver.
  __ bind(&compare_smi_map);
  __ CompareRoot(ic_map, Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, miss);
  __ mov(handler, FieldOperand(vector, slot, times_half_pointer_size,
                               FixedArray::kHeaderSize + kPointerSize));
  __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
  __ jmp(handler);
}
3568
3569
3570void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }
3571
3572
3573void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
3574 GenerateImpl(masm, true);
3575}
3576
3577
// Vector-based LoadIC dispatch: reads the feedback slot and dispatches on
// its shape — weak cell (monomorphic), fixed array (polymorphic), or the
// megamorphic sentinel (stub-cache probe). Falls back to the LoadIC miss
// handler otherwise. (|in_frame| is not used in this implementation.)
void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // edx
  Register name = LoadWithVectorDescriptor::NameRegister();          // ecx
  Register vector = LoadWithVectorDescriptor::VectorRegister();      // ebx
  Register slot = LoadWithVectorDescriptor::SlotRegister();          // eax
  Register scratch = edi;
  // slot is a smi, hence the half-pointer scaling.
  __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
                               FixedArray::kHeaderSize));

  // Is it a weak cell?
  Label try_array;
  Label not_array, smi_key, key_okay, miss;
  __ CompareRoot(FieldOperand(scratch, 0), Heap::kWeakCellMapRootIndex);
  __ j(not_equal, &try_array);
  HandleMonomorphicCase(masm, receiver, name, vector, slot, scratch, &miss);

  // Is it a fixed array?
  __ bind(&try_array);
  __ CompareRoot(FieldOperand(scratch, 0), Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &not_array);
  HandleArrayCases(masm, receiver, name, vector, slot, scratch, true, &miss);

  // Otherwise only the megamorphic sentinel is accepted: probe the stub
  // cache, preserving slot and vector around the probe.
  __ bind(&not_array);
  __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex);
  __ j(not_equal, &miss);
  __ push(slot);
  __ push(vector);
  Code::Flags code_flags =
      Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::LOAD_IC));
  masm->isolate()->stub_cache()->GenerateProbe(masm, Code::LOAD_IC, code_flags,
                                               receiver, name, vector, scratch);
  __ pop(vector);
  __ pop(slot);

  __ bind(&miss);
  LoadIC::GenerateMiss(masm);
}
3615
3616
3617void KeyedLoadICStub::Generate(MacroAssembler* masm) {
3618 GenerateImpl(masm, false);
3619}
3620
3621
3622void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
3623 GenerateImpl(masm, true);
3624}
3625
3626
// Vector-based KeyedLoadIC dispatch: weak cell (monomorphic), fixed array
// (polymorphic element handlers), megamorphic sentinel (generic stub), or
// a cached property name followed by a handler array in the next slot.
// Falls back to the KeyedLoadIC miss handler otherwise. (|in_frame| is not
// used in this implementation.)
void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // edx
  Register key = LoadWithVectorDescriptor::NameRegister();           // ecx
  Register vector = LoadWithVectorDescriptor::VectorRegister();      // ebx
  Register slot = LoadWithVectorDescriptor::SlotRegister();          // eax
  Register feedback = edi;
  // slot is a smi, hence the half-pointer scaling.
  __ mov(feedback, FieldOperand(vector, slot, times_half_pointer_size,
                                FixedArray::kHeaderSize));
  // Is it a weak cell?
  Label try_array;
  Label not_array, smi_key, key_okay, miss;
  __ CompareRoot(FieldOperand(feedback, 0), Heap::kWeakCellMapRootIndex);
  __ j(not_equal, &try_array);
  HandleMonomorphicCase(masm, receiver, key, vector, slot, feedback, &miss);

  __ bind(&try_array);
  // Is it a fixed array?
  __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &not_array);

  // We have a polymorphic element handler.
  Label polymorphic, try_poly_name;
  __ bind(&polymorphic);
  HandleArrayCases(masm, receiver, key, vector, slot, feedback, true, &miss);

  __ bind(&not_array);
  // Is it generic?
  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
  __ j(not_equal, &try_poly_name);
  Handle<Code> megamorphic_stub =
      KeyedLoadIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
  __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);

  __ bind(&try_poly_name);
  // We might have a name in feedback, and a fixed array in the next slot.
  __ cmp(key, feedback);
  __ j(not_equal, &miss);
  // If the name comparison succeeded, we know we have a fixed array with
  // at least one map/handler pair.
  __ mov(feedback, FieldOperand(vector, slot, times_half_pointer_size,
                                FixedArray::kHeaderSize + kPointerSize));
  HandleArrayCases(masm, receiver, key, vector, slot, feedback, false, &miss);

  __ bind(&miss);
  KeyedLoadIC::GenerateMiss(masm);
}
3673
3674
3675void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) {
3676 __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
3677 VectorStoreICStub stub(isolate(), state());
3678 stub.GenerateForTrampoline(masm);
3679}
3680
3681
3682void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
3683 __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
3684 VectorKeyedStoreICStub stub(isolate(), state());
3685 stub.GenerateForTrampoline(masm);
3686}
3687
3688
3689void VectorStoreICStub::Generate(MacroAssembler* masm) {
3690 GenerateImpl(masm, false);
3691}
3692
3693
3694void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
3695 GenerateImpl(masm, true);
3696}
3697
3698
// value is on the stack already.
// Store-IC variant of the array-feedback walk: scans {map (weak cell),
// handler} pairs and jumps to the matching handler. Because the value
// register doubles as the handler register here, the handler address is
// parked in an external "virtual register" so the value can be popped back
// before jumping. Jumps to |miss| when no map matches.
static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register receiver,
                                       Register key, Register vector,
                                       Register slot, Register feedback,
                                       bool is_polymorphic, Label* miss) {
  // feedback initially contains the feedback array
  Label next, next_loop, prepare_next;
  Label load_smi_map, compare_map;
  Label start_polymorphic;
  Label pop_and_miss;
  ExternalReference virtual_register =
      ExternalReference::virtual_handler_register(masm->isolate());

  // receiver and vector are clobbered below, so preserve them on the stack.
  __ push(receiver);
  __ push(vector);

  Register receiver_map = receiver;
  Register cached_map = vector;

  // Receiver might not be a heap object.
  __ JumpIfSmi(receiver, &load_smi_map);
  __ mov(receiver_map, FieldOperand(receiver, 0));
  __ bind(&compare_map);
  __ mov(cached_map, FieldOperand(feedback, FixedArray::OffsetOfElementAt(0)));

  // A named keyed store might have a 2 element array, all other cases can count
  // on an array with at least 2 {map, handler} pairs, so they can go right
  // into polymorphic array handling.
  __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  __ j(not_equal, &start_polymorphic);

  // found, now call handler.
  Register handler = feedback;
  DCHECK(handler.is(VectorStoreICDescriptor::ValueRegister()));
  __ mov(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1)));
  __ pop(vector);
  __ pop(receiver);
  __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
  // Stash the handler entry point and restore the value register before
  // jumping through the virtual register.
  __ mov(Operand::StaticVariable(virtual_register), handler);
  __ pop(handler);  // Pop "value".
  __ jmp(Operand::StaticVariable(virtual_register));

  // Polymorphic, we have to loop from 2 to N
  __ bind(&start_polymorphic);
  __ push(key);
  Register counter = key;
  __ mov(counter, Immediate(Smi::FromInt(2)));

  if (!is_polymorphic) {
    // If is_polymorphic is false, we may only have a two element array.
    // Check against length now in that case.
    __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
    __ j(greater_equal, &pop_and_miss);
  }

  __ bind(&next_loop);
  // counter is a smi, hence the half-pointer scaling.
  __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
                                  FixedArray::kHeaderSize));
  __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  __ j(not_equal, &prepare_next);
  __ mov(handler, FieldOperand(feedback, counter, times_half_pointer_size,
                               FixedArray::kHeaderSize + kPointerSize));
  __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
  __ pop(key);
  __ pop(vector);
  __ pop(receiver);
  __ mov(Operand::StaticVariable(virtual_register), handler);
  __ pop(handler);  // Pop "value".
  __ jmp(Operand::StaticVariable(virtual_register));

  __ bind(&prepare_next);
  __ add(counter, Immediate(Smi::FromInt(2)));
  __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
  __ j(less, &next_loop);

  // We exhausted our array of map handler pairs.
  __ bind(&pop_and_miss);
  __ pop(key);
  __ pop(vector);
  __ pop(receiver);
  __ jmp(miss);

  __ bind(&load_smi_map);
  // Smis are matched against the heap-number map in the feedback array.
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}
3785
3786
// Handles the monomorphic case for a vector store IC: the feedback slot
// holds a WeakCell with the expected map, and the following slot holds the
// handler code object. On a map match the handler is tail-jumped to;
// otherwise control transfers to |miss|. The value to store was pushed on
// the stack by the caller and is popped back into its register just before
// the jump.
static void HandleMonomorphicStoreCase(MacroAssembler* masm, Register receiver,
                                       Register key, Register vector,
                                       Register slot, Register weak_cell,
                                       Label* miss) {
  // The store ic value is on the stack.
  DCHECK(weak_cell.is(VectorStoreICDescriptor::ValueRegister()));
  // All GP registers are occupied, so the handler address is routed through
  // this external "virtual register" before the final indirect jump.
  ExternalReference virtual_register =
      ExternalReference::virtual_handler_register(masm->isolate());

  // feedback initially contains the feedback array
  Label compare_smi_map;

  // Move the weak map into the weak_cell register.
  Register ic_map = weak_cell;
  __ mov(ic_map, FieldOperand(weak_cell, WeakCell::kValueOffset));

  // Receiver might not be a heap object.
  __ JumpIfSmi(receiver, &compare_smi_map);
  __ cmp(ic_map, FieldOperand(receiver, 0));
  __ j(not_equal, miss);
  // Load the handler stored in the feedback slot after the WeakCell.
  __ mov(weak_cell, FieldOperand(vector, slot, times_half_pointer_size,
                                 FixedArray::kHeaderSize + kPointerSize));
  __ lea(weak_cell, FieldOperand(weak_cell, Code::kHeaderSize));
  // Stash the handler entry point, then put the store ic value back in
  // it's register before jumping.
  __ mov(Operand::StaticVariable(virtual_register), weak_cell);
  __ pop(weak_cell);  // Pop "value".
  // jump to the handler.
  __ jmp(Operand::StaticVariable(virtual_register));

  // In microbenchmarks, it made sense to unroll this code so that the call to
  // the handler is duplicated for a HeapObject receiver and a Smi receiver.
  __ bind(&compare_smi_map);
  // Smi receivers are matched against the heap number map.
  __ CompareRoot(ic_map, Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, miss);
  __ mov(weak_cell, FieldOperand(vector, slot, times_half_pointer_size,
                                 FixedArray::kHeaderSize + kPointerSize));
  __ lea(weak_cell, FieldOperand(weak_cell, Code::kHeaderSize));
  __ mov(Operand::StaticVariable(virtual_register), weak_cell);
  __ pop(weak_cell);  // Pop "value".
  // jump to the handler.
  __ jmp(Operand::StaticVariable(virtual_register));
}
3829
3830
3831void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
3832 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // edx
3833 Register key = VectorStoreICDescriptor::NameRegister(); // ecx
3834 Register value = VectorStoreICDescriptor::ValueRegister(); // eax
3835 Register vector = VectorStoreICDescriptor::VectorRegister(); // ebx
3836 Register slot = VectorStoreICDescriptor::SlotRegister(); // edi
3837 Label miss;
3838
3839 __ push(value);
3840
3841 Register scratch = value;
3842 __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
3843 FixedArray::kHeaderSize));
3844
3845 // Is it a weak cell?
3846 Label try_array;
3847 Label not_array, smi_key, key_okay;
3848 __ CompareRoot(FieldOperand(scratch, 0), Heap::kWeakCellMapRootIndex);
3849 __ j(not_equal, &try_array);
3850 HandleMonomorphicStoreCase(masm, receiver, key, vector, slot, scratch, &miss);
3851
3852 // Is it a fixed array?
3853 __ bind(&try_array);
3854 __ CompareRoot(FieldOperand(scratch, 0), Heap::kFixedArrayMapRootIndex);
3855 __ j(not_equal, &not_array);
3856 HandlePolymorphicStoreCase(masm, receiver, key, vector, slot, scratch, true,
3857 &miss);
3858
3859 __ bind(&not_array);
3860 __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex);
3861 __ j(not_equal, &miss);
3862
3863 __ pop(value);
3864 __ push(slot);
3865 __ push(vector);
Ben Murdochc5610432016-08-08 18:44:38 +01003866 Code::Flags code_flags =
3867 Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::STORE_IC));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003868 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::STORE_IC, code_flags,
3869 receiver, key, slot, no_reg);
3870 __ pop(vector);
3871 __ pop(slot);
3872 Label no_pop_miss;
3873 __ jmp(&no_pop_miss);
3874
3875 __ bind(&miss);
3876 __ pop(value);
3877 __ bind(&no_pop_miss);
3878 StoreIC::GenerateMiss(masm);
3879}
3880
3881
// Entry point used when the vector and slot are already in registers
// (no trampoline frame handling required).
void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) {
  GenerateImpl(masm, false);
}
3885
3886
// Entry point used by the trampoline variant; delegates to GenerateImpl
// with in_frame == true.
void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}
3890
3891
3892static void HandlePolymorphicKeyedStoreCase(MacroAssembler* masm,
3893 Register receiver, Register key,
3894 Register vector, Register slot,
3895 Register feedback, Label* miss) {
3896 // feedback initially contains the feedback array
3897 Label next, next_loop, prepare_next;
3898 Label load_smi_map, compare_map;
3899 Label transition_call;
3900 Label pop_and_miss;
3901 ExternalReference virtual_register =
3902 ExternalReference::virtual_handler_register(masm->isolate());
3903 ExternalReference virtual_slot =
3904 ExternalReference::virtual_slot_register(masm->isolate());
3905
3906 __ push(receiver);
3907 __ push(vector);
3908
3909 Register receiver_map = receiver;
3910 Register cached_map = vector;
3911 Register value = StoreDescriptor::ValueRegister();
3912
3913 // Receiver might not be a heap object.
3914 __ JumpIfSmi(receiver, &load_smi_map);
3915 __ mov(receiver_map, FieldOperand(receiver, 0));
3916 __ bind(&compare_map);
3917
3918 // Polymorphic, we have to loop from 0 to N - 1
3919 __ push(key);
3920 // Current stack layout:
3921 // - esp[0] -- key
3922 // - esp[4] -- vector
3923 // - esp[8] -- receiver
3924 // - esp[12] -- value
3925 // - esp[16] -- return address
3926 //
3927 // Required stack layout for handler call:
3928 // - esp[0] -- return address
3929 // - receiver, key, value, vector, slot in registers.
3930 // - handler in virtual register.
3931 Register counter = key;
3932 __ mov(counter, Immediate(Smi::FromInt(0)));
3933 __ bind(&next_loop);
3934 __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
3935 FixedArray::kHeaderSize));
3936 __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
3937 __ j(not_equal, &prepare_next);
3938 __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
3939 FixedArray::kHeaderSize + kPointerSize));
3940 __ CompareRoot(cached_map, Heap::kUndefinedValueRootIndex);
3941 __ j(not_equal, &transition_call);
3942 __ mov(feedback, FieldOperand(feedback, counter, times_half_pointer_size,
3943 FixedArray::kHeaderSize + 2 * kPointerSize));
3944 __ pop(key);
3945 __ pop(vector);
3946 __ pop(receiver);
3947 __ lea(feedback, FieldOperand(feedback, Code::kHeaderSize));
3948 __ mov(Operand::StaticVariable(virtual_register), feedback);
3949 __ pop(value);
3950 __ jmp(Operand::StaticVariable(virtual_register));
3951
3952 __ bind(&transition_call);
3953 // Current stack layout:
3954 // - esp[0] -- key
3955 // - esp[4] -- vector
3956 // - esp[8] -- receiver
3957 // - esp[12] -- value
3958 // - esp[16] -- return address
3959 //
3960 // Required stack layout for handler call:
3961 // - esp[0] -- return address
3962 // - receiver, key, value, map, vector in registers.
3963 // - handler and slot in virtual registers.
3964 __ mov(Operand::StaticVariable(virtual_slot), slot);
3965 __ mov(feedback, FieldOperand(feedback, counter, times_half_pointer_size,
3966 FixedArray::kHeaderSize + 2 * kPointerSize));
3967 __ lea(feedback, FieldOperand(feedback, Code::kHeaderSize));
3968 __ mov(Operand::StaticVariable(virtual_register), feedback);
3969
3970 __ mov(cached_map, FieldOperand(cached_map, WeakCell::kValueOffset));
3971 // The weak cell may have been cleared.
3972 __ JumpIfSmi(cached_map, &pop_and_miss);
3973 DCHECK(!cached_map.is(VectorStoreTransitionDescriptor::MapRegister()));
3974 __ mov(VectorStoreTransitionDescriptor::MapRegister(), cached_map);
3975
3976 // Pop key into place.
3977 __ pop(key);
3978 __ pop(vector);
3979 __ pop(receiver);
3980 __ pop(value);
3981 __ jmp(Operand::StaticVariable(virtual_register));
3982
3983 __ bind(&prepare_next);
3984 __ add(counter, Immediate(Smi::FromInt(3)));
3985 __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
3986 __ j(less, &next_loop);
3987
3988 // We exhausted our array of map handler pairs.
3989 __ bind(&pop_and_miss);
3990 __ pop(key);
3991 __ pop(vector);
3992 __ pop(receiver);
3993 __ jmp(miss);
3994
3995 __ bind(&load_smi_map);
3996 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
3997 __ jmp(&compare_map);
3998}
3999
4000
4001void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
4002 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // edx
4003 Register key = VectorStoreICDescriptor::NameRegister(); // ecx
4004 Register value = VectorStoreICDescriptor::ValueRegister(); // eax
4005 Register vector = VectorStoreICDescriptor::VectorRegister(); // ebx
4006 Register slot = VectorStoreICDescriptor::SlotRegister(); // edi
4007 Label miss;
4008
4009 __ push(value);
4010
4011 Register scratch = value;
4012 __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
4013 FixedArray::kHeaderSize));
4014
4015 // Is it a weak cell?
4016 Label try_array;
4017 Label not_array, smi_key, key_okay;
4018 __ CompareRoot(FieldOperand(scratch, 0), Heap::kWeakCellMapRootIndex);
4019 __ j(not_equal, &try_array);
4020 HandleMonomorphicStoreCase(masm, receiver, key, vector, slot, scratch, &miss);
4021
4022 // Is it a fixed array?
4023 __ bind(&try_array);
4024 __ CompareRoot(FieldOperand(scratch, 0), Heap::kFixedArrayMapRootIndex);
4025 __ j(not_equal, &not_array);
4026 HandlePolymorphicKeyedStoreCase(masm, receiver, key, vector, slot, scratch,
4027 &miss);
4028
4029 __ bind(&not_array);
4030 Label try_poly_name;
4031 __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex);
4032 __ j(not_equal, &try_poly_name);
4033
4034 __ pop(value);
4035
4036 Handle<Code> megamorphic_stub =
4037 KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
4038 __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);
4039
4040 __ bind(&try_poly_name);
4041 // We might have a name in feedback, and a fixed array in the next slot.
4042 __ cmp(key, scratch);
4043 __ j(not_equal, &miss);
4044 // If the name comparison succeeded, we know we have a fixed array with
4045 // at least one map/handler pair.
4046 __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
4047 FixedArray::kHeaderSize + kPointerSize));
4048 HandlePolymorphicStoreCase(masm, receiver, key, vector, slot, scratch, false,
4049 &miss);
4050
4051 __ bind(&miss);
4052 __ pop(value);
4053 KeyedStoreIC::GenerateMiss(masm);
4054}
4055
4056
// Trampoline for the CallIC: loads the type feedback vector into ebx and
// tail-jumps to the real CallIC stub.
void CallICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(ebx);
  CallICStub stub(isolate(), state());
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
}
4062
4063
4064void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
4065 if (masm->isolate()->function_entry_hook() != NULL) {
4066 ProfileEntryHookStub stub(masm->isolate());
4067 masm->CallStub(&stub);
4068 }
4069}
4070
4071
// Invokes the registered function-entry hook with two arguments — the
// calling function's address and the original stack pointer — while
// preserving the volatile registers eax, ecx and edx around the call.
void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  // Save volatile registers.
  const int kNumSavedRegisters = 3;
  __ push(eax);
  __ push(ecx);
  __ push(edx);

  // Calculate and push the original stack pointer.
  __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
  __ push(eax);

  // Retrieve our return address and use it to calculate the calling
  // function's address.
  __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
  __ sub(eax, Immediate(Assembler::kCallInstructionLength));
  __ push(eax);

  // Call the entry hook.
  DCHECK(isolate()->function_entry_hook() != NULL);
  __ call(FUNCTION_ADDR(isolate()->function_entry_hook()),
          RelocInfo::RUNTIME_ENTRY);
  // Drop the two arguments pushed for the hook.
  __ add(esp, Immediate(2 * kPointerSize));

  // Restore the saved volatile registers.
  __ pop(edx);
  __ pop(ecx);
  __ pop(eax);

  __ ret(0);
}
4102
4103
4104template<class T>
4105static void CreateArrayDispatch(MacroAssembler* masm,
4106 AllocationSiteOverrideMode mode) {
4107 if (mode == DISABLE_ALLOCATION_SITES) {
4108 T stub(masm->isolate(),
4109 GetInitialFastElementsKind(),
4110 mode);
4111 __ TailCallStub(&stub);
4112 } else if (mode == DONT_OVERRIDE) {
4113 int last_index = GetSequenceIndexFromFastElementsKind(
4114 TERMINAL_FAST_ELEMENTS_KIND);
4115 for (int i = 0; i <= last_index; ++i) {
4116 Label next;
4117 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4118 __ cmp(edx, kind);
4119 __ j(not_equal, &next);
4120 T stub(masm->isolate(), kind);
4121 __ TailCallStub(&stub);
4122 __ bind(&next);
4123 }
4124
4125 // If we reached this point there is a problem.
4126 __ Abort(kUnexpectedElementsKindInArrayConstructor);
4127 } else {
4128 UNREACHABLE();
4129 }
4130}
4131
4132
// Emits the single-argument Array constructor dispatch. When the one
// argument (the requested length) is non-zero, the array must be created
// holey; in DONT_OVERRIDE mode the packed kind in edx is bumped to its
// holey counterpart and the change is recorded in the AllocationSite
// before dispatching to the matching single-argument stub.
static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
                                           AllocationSiteOverrideMode mode) {
  // ebx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
  // edx - kind (if mode != DISABLE_ALLOCATION_SITES)
  // eax - number of arguments
  // edi - constructor?
  // esp[0] - return address
  // esp[4] - last argument
  Label normal_sequence;
  if (mode == DONT_OVERRIDE) {
    // The holey/packed encoding below relies on this exact kind numbering.
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
    STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4);
    STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);

    // is the low bit set? If so, we are holey and that is good.
    __ test_b(edx, Immediate(1));
    __ j(not_zero, &normal_sequence);
  }

  // look at the first argument
  __ mov(ecx, Operand(esp, kPointerSize));
  __ test(ecx, ecx);
  __ j(zero, &normal_sequence);

  if (mode == DISABLE_ALLOCATION_SITES) {
    ElementsKind initial = GetInitialFastElementsKind();
    ElementsKind holey_initial = GetHoleyElementsKind(initial);

    ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
                                                  holey_initial,
                                                  DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub_holey);

    __ bind(&normal_sequence);
    ArraySingleArgumentConstructorStub stub(masm->isolate(),
                                            initial,
                                            DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    // We are going to create a holey array, but our kind is non-holey.
    // Fix kind and retry.
    __ inc(edx);

    if (FLAG_debug_code) {
      Handle<Map> allocation_site_map =
          masm->isolate()->factory()->allocation_site_map();
      __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
      __ Assert(equal, kExpectedAllocationSite);
    }

    // Save the resulting elements kind in type info. We can't just store r3
    // in the AllocationSite::transition_info field because elements kind is
    // restricted to a portion of the field...upper bits need to be left alone.
    STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
    __ add(FieldOperand(ebx, AllocationSite::kTransitionInfoOffset),
           Immediate(Smi::FromInt(kFastElementsKindPackedToHoley)));

    __ bind(&normal_sequence);
    // Dispatch on the (possibly bumped) elements kind in edx.
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmp(edx, kind);
      __ j(not_equal, &next);
      ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}
4212
4213
4214template<class T>
4215static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
4216 int to_index = GetSequenceIndexFromFastElementsKind(
4217 TERMINAL_FAST_ELEMENTS_KIND);
4218 for (int i = 0; i <= to_index; ++i) {
4219 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4220 T stub(isolate, kind);
4221 stub.GetCode();
4222 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
4223 T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
4224 stub1.GetCode();
4225 }
4226 }
4227}
4228
// Pre-generates all Array constructor stubs (no-argument, single-argument,
// N-argument) plus the internal-array variants for the two elements kinds
// internal arrays use, so they are available without lazy compilation.
void CommonArrayConstructorStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
      isolate);
  ArrayNArgumentsConstructorStub stub(isolate);
  stub.GetCode();

  ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
  for (int i = 0; i < 2; i++) {
    // For internal arrays we only need a few things
    InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
    stubh1.GetCode();
    InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
    stubh2.GetCode();
  }
}
4246
4247
4248void ArrayConstructorStub::GenerateDispatchToArrayStub(
4249 MacroAssembler* masm,
4250 AllocationSiteOverrideMode mode) {
4251 if (argument_count() == ANY) {
4252 Label not_zero_case, not_one_case;
4253 __ test(eax, eax);
4254 __ j(not_zero, &not_zero_case);
4255 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
4256
4257 __ bind(&not_zero_case);
4258 __ cmp(eax, 1);
4259 __ j(greater, &not_one_case);
4260 CreateArrayDispatchOneArgument(masm, mode);
4261
4262 __ bind(&not_one_case);
Ben Murdoch61f157c2016-09-16 13:49:30 +01004263 ArrayNArgumentsConstructorStub stub(masm->isolate());
4264 __ TailCallStub(&stub);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004265 } else if (argument_count() == NONE) {
4266 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
4267 } else if (argument_count() == ONE) {
4268 CreateArrayDispatchOneArgument(masm, mode);
4269 } else if (argument_count() == MORE_THAN_ONE) {
Ben Murdoch61f157c2016-09-16 13:49:30 +01004270 ArrayNArgumentsConstructorStub stub(masm->isolate());
4271 __ TailCallStub(&stub);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004272 } else {
4273 UNREACHABLE();
4274 }
4275}
4276
4277
// Entry point for the Array constructor stub. Dispatches on the transition
// info of the AllocationSite in ebx (or disables allocation-site tracking
// when ebx is undefined), and falls back to %NewArray for subclass
// construction, i.e. when the new target in edx differs from edi.
void ArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- eax : argc (only if argument_count() is ANY or MORE_THAN_ONE)
  // -- ebx : AllocationSite or undefined
  // -- edi : constructor
  // -- edx : Original constructor
  // -- esp[0] : return address
  // -- esp[4] : last argument
  // -----------------------------------
  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    __ test(ecx, Immediate(kSmiTagMask));
    __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(ecx, MAP_TYPE, ecx);
    __ Assert(equal, kUnexpectedInitialMapForArrayFunction);

    // We should either have undefined in ebx or a valid AllocationSite
    __ AssertUndefinedOrAllocationSite(ebx);
  }

  Label subclassing;

  // Enter the context of the Array function.
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // A new target different from the constructor means subclassing.
  __ cmp(edx, edi);
  __ j(not_equal, &subclassing);

  Label no_info;
  // If the feedback vector is the undefined value call an array constructor
  // that doesn't use AllocationSites.
  __ cmp(ebx, isolate()->factory()->undefined_value());
  __ j(equal, &no_info);

  // Extract the elements kind from the AllocationSite's transition info
  // (only the ElementsKindBits portion of the field).
  __ mov(edx, FieldOperand(ebx, AllocationSite::kTransitionInfoOffset));
  __ SmiUntag(edx);
  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
  __ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask));
  GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);

  __ bind(&no_info);
  GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);

  // Subclassing: rearrange the arguments and tail-call %NewArray.
  __ bind(&subclassing);
  switch (argument_count()) {
    case ANY:
    case MORE_THAN_ONE:
      __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
      __ add(eax, Immediate(3));
      break;
    case NONE:
      __ mov(Operand(esp, 1 * kPointerSize), edi);
      __ mov(eax, Immediate(3));
      break;
    case ONE:
      __ mov(Operand(esp, 2 * kPointerSize), edi);
      __ mov(eax, Immediate(4));
      break;
  }
  __ PopReturnAddressTo(ecx);
  __ Push(edx);
  __ Push(ebx);
  __ PushReturnAddressFrom(ecx);
  __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
}
4350
4351
// Emits the internal-array constructor dispatch for one elements kind:
// zero arguments, one argument (with a holey variant when the requested
// length is non-zero and the kind is packed), or the N-argument stub.
void InternalArrayConstructorStub::GenerateCase(
    MacroAssembler* masm, ElementsKind kind) {
  Label not_zero_case, not_one_case;
  Label normal_sequence;

  __ test(eax, eax);
  __ j(not_zero, &not_zero_case);
  InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
  __ TailCallStub(&stub0);

  __ bind(&not_zero_case);
  __ cmp(eax, 1);
  __ j(greater, &not_one_case);

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array
    // look at the first argument
    __ mov(ecx, Operand(esp, kPointerSize));
    __ test(ecx, ecx);
    __ j(zero, &normal_sequence);

    InternalArraySingleArgumentConstructorStub
        stub1_holey(isolate(), GetHoleyElementsKind(kind));
    __ TailCallStub(&stub1_holey);
  }

  __ bind(&normal_sequence);
  InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
  __ TailCallStub(&stub1);

  __ bind(&not_one_case);
  ArrayNArgumentsConstructorStub stubN(isolate());
  __ TailCallStub(&stubN);
}
4386
4387
// Entry point for the internal Array constructor: reads the elements kind
// out of the constructor's initial map and dispatches to GenerateCase for
// FAST_ELEMENTS or FAST_HOLEY_ELEMENTS (the only kinds internal arrays
// support, as asserted in debug code).
void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- eax : argc
  // -- edi : constructor
  // -- esp[0] : return address
  // -- esp[4] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    __ test(ecx, Immediate(kSmiTagMask));
    __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(ecx, MAP_TYPE, ecx);
    __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind
  __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));

  // Load the map's "bit field 2" into |result|. We only need the first byte,
  // but the following masking takes care of that anyway.
  __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ DecodeField<Map::ElementsKindBits>(ecx);

  if (FLAG_debug_code) {
    Label done;
    __ cmp(ecx, Immediate(FAST_ELEMENTS));
    __ j(equal, &done);
    __ cmp(ecx, Immediate(FAST_HOLEY_ELEMENTS));
    __ Assert(equal,
              kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  Label fast_elements_case;
  __ cmp(ecx, Immediate(FAST_ELEMENTS));
  __ j(equal, &fast_elements_case);
  GenerateCase(masm, FAST_HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateCase(masm, FAST_ELEMENTS);
}
4436
4437
// Fast path for `new target(...)` object creation: validates the new
// target and its initial map, allocates the JSObject inline (with
// in-object slack tracking when the map requests it), and falls back to
// the %AllocateInNewSpace / %NewObject runtime functions when the fast
// path does not apply.
void FastNewObjectStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- edi : target
  // -- edx : new target
  // -- esi : context
  // -- esp[0] : return address
  // -----------------------------------
  __ AssertFunction(edi);
  __ AssertReceiver(edx);

  // Verify that the new target is a JSFunction.
  Label new_object;
  __ CmpObjectType(edx, JS_FUNCTION_TYPE, ebx);
  __ j(not_equal, &new_object);

  // Load the initial map and verify that it's in fact a map.
  __ mov(ecx, FieldOperand(edx, JSFunction::kPrototypeOrInitialMapOffset));
  __ JumpIfSmi(ecx, &new_object);
  __ CmpObjectType(ecx, MAP_TYPE, ebx);
  __ j(not_equal, &new_object);

  // Fall back to runtime if the target differs from the new target's
  // initial map constructor.
  __ cmp(edi, FieldOperand(ecx, Map::kConstructorOrBackPointerOffset));
  __ j(not_equal, &new_object);

  // Allocate the JSObject on the heap.
  Label allocate, done_allocate;
  __ movzx_b(ebx, FieldOperand(ecx, Map::kInstanceSizeOffset));
  // Instance size is stored in words; scale to bytes.
  __ lea(ebx, Operand(ebx, times_pointer_size, 0));
  __ Allocate(ebx, eax, edi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
  __ bind(&done_allocate);

  // Initialize the JSObject fields.
  __ mov(FieldOperand(eax, JSObject::kMapOffset), ecx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         masm->isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         masm->isolate()->factory()->empty_fixed_array());
  STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
  __ lea(ebx, FieldOperand(eax, JSObject::kHeaderSize));

  // ----------- S t a t e -------------
  // -- eax : result (tagged)
  // -- ebx : result fields (untagged)
  // -- edi : result end (untagged)
  // -- ecx : initial map
  // -- esi : context
  // -- esp[0] : return address
  // -----------------------------------

  // Perform in-object slack tracking if requested.
  Label slack_tracking;
  STATIC_ASSERT(Map::kNoSlackTracking == 0);
  __ test(FieldOperand(ecx, Map::kBitField3Offset),
          Immediate(Map::ConstructionCounter::kMask));
  __ j(not_zero, &slack_tracking, Label::kNear);
  {
    // Initialize all in-object fields with undefined.
    __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
    __ InitializeFieldsWithFiller(ebx, edi, edx);
    __ Ret();
  }
  __ bind(&slack_tracking);
  {
    // Decrease generous allocation count.
    STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
    __ sub(FieldOperand(ecx, Map::kBitField3Offset),
           Immediate(1 << Map::ConstructionCounter::kShift));

    // Initialize the in-object fields with undefined.
    __ movzx_b(edx, FieldOperand(ecx, Map::kUnusedPropertyFieldsOffset));
    __ neg(edx);
    __ lea(edx, Operand(edi, edx, times_pointer_size, 0));
    __ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
    __ InitializeFieldsWithFiller(ebx, edx, edi);

    // Initialize the remaining (reserved) fields with one pointer filler map.
    __ movzx_b(edx, FieldOperand(ecx, Map::kUnusedPropertyFieldsOffset));
    __ lea(edx, Operand(ebx, edx, times_pointer_size, 0));
    __ LoadRoot(edi, Heap::kOnePointerFillerMapRootIndex);
    __ InitializeFieldsWithFiller(ebx, edx, edi);

    // Check if we can finalize the instance size.
    Label finalize;
    STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
    __ test(FieldOperand(ecx, Map::kBitField3Offset),
            Immediate(Map::ConstructionCounter::kMask));
    __ j(zero, &finalize, Label::kNear);
    __ Ret();

    // Finalize the instance size.
    __ bind(&finalize);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(eax);
      __ Push(ecx);
      __ CallRuntime(Runtime::kFinalizeInstanceSize);
      __ Pop(eax);
    }
    __ Ret();
  }

  // Fall back to %AllocateInNewSpace.
  __ bind(&allocate);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ SmiTag(ebx);
    __ Push(ecx);
    __ Push(ebx);
    __ CallRuntime(Runtime::kAllocateInNewSpace);
    __ Pop(ecx);
  }
  // Recompute the untagged result-end pointer expected at done_allocate.
  __ movzx_b(ebx, FieldOperand(ecx, Map::kInstanceSizeOffset));
  __ lea(edi, Operand(eax, ebx, times_pointer_size, 0));
  STATIC_ASSERT(kHeapObjectTag == 1);
  __ dec(edi);
  __ jmp(&done_allocate);

  // Fall back to %NewObject.
  __ bind(&new_object);
  __ PopReturnAddressTo(ecx);
  __ Push(edi);
  __ Push(edx);
  __ PushReturnAddressFrom(ecx);
  __ TailCallRuntime(Runtime::kNewObject);
}
4565
4566
4567void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
4568 // ----------- S t a t e -------------
4569 // -- edi : function
4570 // -- esi : context
4571 // -- ebp : frame pointer
4572 // -- esp[0] : return address
4573 // -----------------------------------
4574 __ AssertFunction(edi);
4575
Ben Murdochc5610432016-08-08 18:44:38 +01004576 // Make edx point to the JavaScript frame.
4577 __ mov(edx, ebp);
4578 if (skip_stub_frame()) {
4579 // For Ignition we need to skip the handler/stub frame to reach the
4580 // JavaScript frame for the function.
Ben Murdoch097c5b22016-05-18 11:27:45 +01004581 __ mov(edx, Operand(edx, StandardFrameConstants::kCallerFPOffset));
Ben Murdochc5610432016-08-08 18:44:38 +01004582 }
4583 if (FLAG_debug_code) {
4584 Label ok;
Ben Murdochda12d292016-06-02 14:46:10 +01004585 __ cmp(edi, Operand(edx, StandardFrameConstants::kFunctionOffset));
Ben Murdochc5610432016-08-08 18:44:38 +01004586 __ j(equal, &ok);
4587 __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
4588 __ bind(&ok);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004589 }
4590
4591 // Check if we have rest parameters (only possible if we have an
4592 // arguments adaptor frame below the function frame).
4593 Label no_rest_parameters;
4594 __ mov(ebx, Operand(edx, StandardFrameConstants::kCallerFPOffset));
Ben Murdochda12d292016-06-02 14:46:10 +01004595 __ cmp(Operand(ebx, CommonFrameConstants::kContextOrFrameTypeOffset),
Ben Murdoch097c5b22016-05-18 11:27:45 +01004596 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4597 __ j(not_equal, &no_rest_parameters, Label::kNear);
4598
4599 // Check if the arguments adaptor frame contains more arguments than
4600 // specified by the function's internal formal parameter count.
4601 Label rest_parameters;
4602 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
4603 __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
4604 __ sub(eax,
4605 FieldOperand(ecx, SharedFunctionInfo::kFormalParameterCountOffset));
4606 __ j(greater, &rest_parameters);
4607
4608 // Return an empty rest parameter array.
4609 __ bind(&no_rest_parameters);
4610 {
4611 // ----------- S t a t e -------------
4612 // -- esi : context
4613 // -- esp[0] : return address
4614 // -----------------------------------
4615
4616 // Allocate an empty rest parameter array.
4617 Label allocate, done_allocate;
Ben Murdochc5610432016-08-08 18:44:38 +01004618 __ Allocate(JSArray::kSize, eax, edx, ecx, &allocate, NO_ALLOCATION_FLAGS);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004619 __ bind(&done_allocate);
4620
4621 // Setup the rest parameter array in rax.
4622 __ LoadGlobalFunction(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, ecx);
4623 __ mov(FieldOperand(eax, JSArray::kMapOffset), ecx);
4624 __ mov(ecx, isolate()->factory()->empty_fixed_array());
4625 __ mov(FieldOperand(eax, JSArray::kPropertiesOffset), ecx);
4626 __ mov(FieldOperand(eax, JSArray::kElementsOffset), ecx);
4627 __ mov(FieldOperand(eax, JSArray::kLengthOffset),
4628 Immediate(Smi::FromInt(0)));
4629 STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
4630 __ Ret();
4631
4632 // Fall back to %AllocateInNewSpace.
4633 __ bind(&allocate);
4634 {
4635 FrameScope scope(masm, StackFrame::INTERNAL);
4636 __ Push(Smi::FromInt(JSArray::kSize));
4637 __ CallRuntime(Runtime::kAllocateInNewSpace);
4638 }
4639 __ jmp(&done_allocate);
4640 }
4641
4642 __ bind(&rest_parameters);
4643 {
4644 // Compute the pointer to the first rest parameter (skippping the receiver).
4645 __ lea(ebx,
4646 Operand(ebx, eax, times_half_pointer_size,
4647 StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
4648
4649 // ----------- S t a t e -------------
4650 // -- esi : context
4651 // -- eax : number of rest parameters (tagged)
4652 // -- ebx : pointer to first rest parameters
4653 // -- esp[0] : return address
4654 // -----------------------------------
4655
4656 // Allocate space for the rest parameter array plus the backing store.
4657 Label allocate, done_allocate;
4658 __ lea(ecx, Operand(eax, times_half_pointer_size,
4659 JSArray::kSize + FixedArray::kHeaderSize));
Ben Murdochc5610432016-08-08 18:44:38 +01004660 __ Allocate(ecx, edx, edi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004661 __ bind(&done_allocate);
4662
4663 // Setup the elements array in edx.
4664 __ mov(FieldOperand(edx, FixedArray::kMapOffset),
4665 isolate()->factory()->fixed_array_map());
4666 __ mov(FieldOperand(edx, FixedArray::kLengthOffset), eax);
4667 {
4668 Label loop, done_loop;
4669 __ Move(ecx, Smi::FromInt(0));
4670 __ bind(&loop);
4671 __ cmp(ecx, eax);
4672 __ j(equal, &done_loop, Label::kNear);
4673 __ mov(edi, Operand(ebx, 0 * kPointerSize));
4674 __ mov(FieldOperand(edx, ecx, times_half_pointer_size,
4675 FixedArray::kHeaderSize),
4676 edi);
4677 __ sub(ebx, Immediate(1 * kPointerSize));
4678 __ add(ecx, Immediate(Smi::FromInt(1)));
4679 __ jmp(&loop);
4680 __ bind(&done_loop);
4681 }
4682
4683 // Setup the rest parameter array in edi.
4684 __ lea(edi,
4685 Operand(edx, eax, times_half_pointer_size, FixedArray::kHeaderSize));
4686 __ LoadGlobalFunction(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, ecx);
4687 __ mov(FieldOperand(edi, JSArray::kMapOffset), ecx);
4688 __ mov(FieldOperand(edi, JSArray::kPropertiesOffset),
4689 isolate()->factory()->empty_fixed_array());
4690 __ mov(FieldOperand(edi, JSArray::kElementsOffset), edx);
4691 __ mov(FieldOperand(edi, JSArray::kLengthOffset), eax);
4692 STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
4693 __ mov(eax, edi);
4694 __ Ret();
4695
Ben Murdoch61f157c2016-09-16 13:49:30 +01004696 // Fall back to %AllocateInNewSpace (if not too big).
4697 Label too_big_for_new_space;
Ben Murdoch097c5b22016-05-18 11:27:45 +01004698 __ bind(&allocate);
Ben Murdoch61f157c2016-09-16 13:49:30 +01004699 __ cmp(ecx, Immediate(Page::kMaxRegularHeapObjectSize));
4700 __ j(greater, &too_big_for_new_space);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004701 {
4702 FrameScope scope(masm, StackFrame::INTERNAL);
4703 __ SmiTag(ecx);
4704 __ Push(eax);
4705 __ Push(ebx);
4706 __ Push(ecx);
4707 __ CallRuntime(Runtime::kAllocateInNewSpace);
4708 __ mov(edx, eax);
4709 __ Pop(ebx);
4710 __ Pop(eax);
4711 }
4712 __ jmp(&done_allocate);
Ben Murdoch61f157c2016-09-16 13:49:30 +01004713
4714 // Fall back to %NewRestParameter.
4715 __ bind(&too_big_for_new_space);
4716 __ PopReturnAddressTo(ecx);
4717 // We reload the function from the caller frame due to register pressure
4718 // within this stub. This is the slow path, hence reloading is preferable.
4719 if (skip_stub_frame()) {
4720 // For Ignition we need to skip the handler/stub frame to reach the
4721 // JavaScript frame for the function.
4722 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
4723 __ Push(Operand(edx, StandardFrameConstants::kFunctionOffset));
4724 } else {
4725 __ Push(Operand(ebp, StandardFrameConstants::kFunctionOffset));
4726 }
4727 __ PushReturnAddressFrom(ecx);
4728 __ TailCallRuntime(Runtime::kNewRestParameter);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004729 }
4730}
4731
4732
4733void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
4734 // ----------- S t a t e -------------
4735 // -- edi : function
4736 // -- esi : context
4737 // -- ebp : frame pointer
4738 // -- esp[0] : return address
4739 // -----------------------------------
4740 __ AssertFunction(edi);
4741
Ben Murdochc5610432016-08-08 18:44:38 +01004742 // Make ecx point to the JavaScript frame.
4743 __ mov(ecx, ebp);
4744 if (skip_stub_frame()) {
4745 // For Ignition we need to skip the handler/stub frame to reach the
4746 // JavaScript frame for the function.
4747 __ mov(ecx, Operand(ecx, StandardFrameConstants::kCallerFPOffset));
4748 }
4749 if (FLAG_debug_code) {
4750 Label ok;
4751 __ cmp(edi, Operand(ecx, StandardFrameConstants::kFunctionOffset));
4752 __ j(equal, &ok);
4753 __ Abort(kInvalidFrameForFastNewSloppyArgumentsStub);
4754 __ bind(&ok);
4755 }
4756
Ben Murdoch097c5b22016-05-18 11:27:45 +01004757 // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
Ben Murdochc5610432016-08-08 18:44:38 +01004758 __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
4759 __ mov(ebx,
4760 FieldOperand(ebx, SharedFunctionInfo::kFormalParameterCountOffset));
4761 __ lea(edx, Operand(ecx, ebx, times_half_pointer_size,
Ben Murdoch097c5b22016-05-18 11:27:45 +01004762 StandardFrameConstants::kCallerSPOffset));
4763
Ben Murdochc5610432016-08-08 18:44:38 +01004764 // ebx : number of parameters (tagged)
Ben Murdoch097c5b22016-05-18 11:27:45 +01004765 // edx : parameters pointer
4766 // edi : function
Ben Murdochc5610432016-08-08 18:44:38 +01004767 // ecx : JavaScript frame pointer.
Ben Murdoch097c5b22016-05-18 11:27:45 +01004768 // esp[0] : return address
4769
4770 // Check if the calling frame is an arguments adaptor frame.
4771 Label adaptor_frame, try_allocate, runtime;
Ben Murdochc5610432016-08-08 18:44:38 +01004772 __ mov(eax, Operand(ecx, StandardFrameConstants::kCallerFPOffset));
4773 __ mov(eax, Operand(eax, CommonFrameConstants::kContextOrFrameTypeOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004774 __ cmp(eax, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4775 __ j(equal, &adaptor_frame, Label::kNear);
4776
4777 // No adaptor, parameter count = argument count.
Ben Murdochc5610432016-08-08 18:44:38 +01004778 __ mov(ecx, ebx);
4779 __ push(ebx);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004780 __ jmp(&try_allocate, Label::kNear);
4781
4782 // We have an adaptor frame. Patch the parameters pointer.
4783 __ bind(&adaptor_frame);
Ben Murdochc5610432016-08-08 18:44:38 +01004784 __ push(ebx);
4785 __ mov(edx, Operand(ecx, StandardFrameConstants::kCallerFPOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004786 __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
4787 __ lea(edx, Operand(edx, ecx, times_2,
4788 StandardFrameConstants::kCallerSPOffset));
4789
4790 // ebx = parameter count (tagged)
4791 // ecx = argument count (smi-tagged)
4792 // Compute the mapped parameter count = min(ebx, ecx) in ebx.
4793 __ cmp(ebx, ecx);
4794 __ j(less_equal, &try_allocate, Label::kNear);
4795 __ mov(ebx, ecx);
4796
4797 // Save mapped parameter count and function.
4798 __ bind(&try_allocate);
4799 __ push(edi);
4800 __ push(ebx);
4801
4802 // Compute the sizes of backing store, parameter map, and arguments object.
4803 // 1. Parameter map, has 2 extra words containing context and backing store.
4804 const int kParameterMapHeaderSize =
4805 FixedArray::kHeaderSize + 2 * kPointerSize;
4806 Label no_parameter_map;
4807 __ test(ebx, ebx);
4808 __ j(zero, &no_parameter_map, Label::kNear);
4809 __ lea(ebx, Operand(ebx, times_2, kParameterMapHeaderSize));
4810 __ bind(&no_parameter_map);
4811
4812 // 2. Backing store.
4813 __ lea(ebx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize));
4814
4815 // 3. Arguments object.
4816 __ add(ebx, Immediate(JSSloppyArgumentsObject::kSize));
4817
4818 // Do the allocation of all three objects in one go.
Ben Murdochc5610432016-08-08 18:44:38 +01004819 __ Allocate(ebx, eax, edi, no_reg, &runtime, NO_ALLOCATION_FLAGS);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004820
4821 // eax = address of new object(s) (tagged)
4822 // ecx = argument count (smi-tagged)
4823 // esp[0] = mapped parameter count (tagged)
4824 // esp[4] = function
4825 // esp[8] = parameter count (tagged)
4826 // Get the arguments map from the current native context into edi.
4827 Label has_mapped_parameters, instantiate;
4828 __ mov(edi, NativeContextOperand());
4829 __ mov(ebx, Operand(esp, 0 * kPointerSize));
4830 __ test(ebx, ebx);
4831 __ j(not_zero, &has_mapped_parameters, Label::kNear);
4832 __ mov(
4833 edi,
4834 Operand(edi, Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX)));
4835 __ jmp(&instantiate, Label::kNear);
4836
4837 __ bind(&has_mapped_parameters);
4838 __ mov(edi, Operand(edi, Context::SlotOffset(
4839 Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX)));
4840 __ bind(&instantiate);
4841
4842 // eax = address of new object (tagged)
4843 // ebx = mapped parameter count (tagged)
4844 // ecx = argument count (smi-tagged)
4845 // edi = address of arguments map (tagged)
4846 // esp[0] = mapped parameter count (tagged)
4847 // esp[4] = function
4848 // esp[8] = parameter count (tagged)
4849 // Copy the JS object part.
4850 __ mov(FieldOperand(eax, JSObject::kMapOffset), edi);
4851 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
4852 masm->isolate()->factory()->empty_fixed_array());
4853 __ mov(FieldOperand(eax, JSObject::kElementsOffset),
4854 masm->isolate()->factory()->empty_fixed_array());
4855
4856 // Set up the callee in-object property.
4857 STATIC_ASSERT(JSSloppyArgumentsObject::kCalleeIndex == 1);
4858 __ mov(edi, Operand(esp, 1 * kPointerSize));
4859 __ AssertNotSmi(edi);
4860 __ mov(FieldOperand(eax, JSSloppyArgumentsObject::kCalleeOffset), edi);
4861
4862 // Use the length (smi tagged) and set that as an in-object property too.
4863 __ AssertSmi(ecx);
4864 __ mov(FieldOperand(eax, JSSloppyArgumentsObject::kLengthOffset), ecx);
4865
4866 // Set up the elements pointer in the allocated arguments object.
4867 // If we allocated a parameter map, edi will point there, otherwise to the
4868 // backing store.
4869 __ lea(edi, Operand(eax, JSSloppyArgumentsObject::kSize));
4870 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
4871
4872 // eax = address of new object (tagged)
4873 // ebx = mapped parameter count (tagged)
4874 // ecx = argument count (tagged)
4875 // edx = address of receiver argument
4876 // edi = address of parameter map or backing store (tagged)
4877 // esp[0] = mapped parameter count (tagged)
4878 // esp[4] = function
4879 // esp[8] = parameter count (tagged)
4880 // Free two registers.
4881 __ push(edx);
4882 __ push(eax);
4883
4884 // Initialize parameter map. If there are no mapped arguments, we're done.
4885 Label skip_parameter_map;
4886 __ test(ebx, ebx);
4887 __ j(zero, &skip_parameter_map);
4888
4889 __ mov(FieldOperand(edi, FixedArray::kMapOffset),
4890 Immediate(isolate()->factory()->sloppy_arguments_elements_map()));
4891 __ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2))));
4892 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax);
4893 __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi);
4894 __ lea(eax, Operand(edi, ebx, times_2, kParameterMapHeaderSize));
4895 __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 1 * kPointerSize), eax);
4896
4897 // Copy the parameter slots and the holes in the arguments.
4898 // We need to fill in mapped_parameter_count slots. They index the context,
4899 // where parameters are stored in reverse order, at
4900 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
4901 // The mapped parameter thus need to get indices
4902 // MIN_CONTEXT_SLOTS+parameter_count-1 ..
4903 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
4904 // We loop from right to left.
4905 Label parameters_loop, parameters_test;
4906 __ push(ecx);
4907 __ mov(eax, Operand(esp, 3 * kPointerSize));
4908 __ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
4909 __ add(ebx, Operand(esp, 5 * kPointerSize));
4910 __ sub(ebx, eax);
4911 __ mov(ecx, isolate()->factory()->the_hole_value());
4912 __ mov(edx, edi);
4913 __ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize));
4914 // eax = loop variable (tagged)
4915 // ebx = mapping index (tagged)
4916 // ecx = the hole value
4917 // edx = address of parameter map (tagged)
4918 // edi = address of backing store (tagged)
4919 // esp[0] = argument count (tagged)
4920 // esp[4] = address of new object (tagged)
4921 // esp[8] = address of receiver argument
4922 // esp[12] = mapped parameter count (tagged)
4923 // esp[16] = function
4924 // esp[20] = parameter count (tagged)
4925 __ jmp(&parameters_test, Label::kNear);
4926
4927 __ bind(&parameters_loop);
4928 __ sub(eax, Immediate(Smi::FromInt(1)));
4929 __ mov(FieldOperand(edx, eax, times_2, kParameterMapHeaderSize), ebx);
4930 __ mov(FieldOperand(edi, eax, times_2, FixedArray::kHeaderSize), ecx);
4931 __ add(ebx, Immediate(Smi::FromInt(1)));
4932 __ bind(&parameters_test);
4933 __ test(eax, eax);
4934 __ j(not_zero, &parameters_loop, Label::kNear);
4935 __ pop(ecx);
4936
4937 __ bind(&skip_parameter_map);
4938
4939 // ecx = argument count (tagged)
4940 // edi = address of backing store (tagged)
4941 // esp[0] = address of new object (tagged)
4942 // esp[4] = address of receiver argument
4943 // esp[8] = mapped parameter count (tagged)
4944 // esp[12] = function
4945 // esp[16] = parameter count (tagged)
4946 // Copy arguments header and remaining slots (if there are any).
4947 __ mov(FieldOperand(edi, FixedArray::kMapOffset),
4948 Immediate(isolate()->factory()->fixed_array_map()));
4949 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
4950
4951 Label arguments_loop, arguments_test;
4952 __ mov(ebx, Operand(esp, 2 * kPointerSize));
4953 __ mov(edx, Operand(esp, 1 * kPointerSize));
4954 __ sub(edx, ebx); // Is there a smarter way to do negative scaling?
4955 __ sub(edx, ebx);
4956 __ jmp(&arguments_test, Label::kNear);
4957
4958 __ bind(&arguments_loop);
4959 __ sub(edx, Immediate(kPointerSize));
4960 __ mov(eax, Operand(edx, 0));
4961 __ mov(FieldOperand(edi, ebx, times_2, FixedArray::kHeaderSize), eax);
4962 __ add(ebx, Immediate(Smi::FromInt(1)));
4963
4964 __ bind(&arguments_test);
4965 __ cmp(ebx, ecx);
4966 __ j(less, &arguments_loop, Label::kNear);
4967
4968 // Restore.
4969 __ pop(eax); // Address of arguments object.
4970 __ Drop(4);
4971
4972 // Return.
4973 __ ret(0);
4974
4975 // Do the runtime call to allocate the arguments object.
4976 __ bind(&runtime);
4977 __ pop(eax); // Remove saved mapped parameter count.
4978 __ pop(edi); // Pop saved function.
4979 __ pop(eax); // Remove saved parameter count.
4980 __ pop(eax); // Pop return address.
4981 __ push(edi); // Push function.
4982 __ push(edx); // Push parameters pointer.
4983 __ push(ecx); // Push parameter count.
4984 __ push(eax); // Push return address.
4985 __ TailCallRuntime(Runtime::kNewSloppyArguments);
4986}
4987
4988
4989void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
4990 // ----------- S t a t e -------------
4991 // -- edi : function
4992 // -- esi : context
4993 // -- ebp : frame pointer
4994 // -- esp[0] : return address
4995 // -----------------------------------
4996 __ AssertFunction(edi);
4997
Ben Murdochc5610432016-08-08 18:44:38 +01004998 // Make edx point to the JavaScript frame.
4999 __ mov(edx, ebp);
5000 if (skip_stub_frame()) {
5001 // For Ignition we need to skip the handler/stub frame to reach the
5002 // JavaScript frame for the function.
Ben Murdoch097c5b22016-05-18 11:27:45 +01005003 __ mov(edx, Operand(edx, StandardFrameConstants::kCallerFPOffset));
Ben Murdochc5610432016-08-08 18:44:38 +01005004 }
5005 if (FLAG_debug_code) {
5006 Label ok;
Ben Murdochda12d292016-06-02 14:46:10 +01005007 __ cmp(edi, Operand(edx, StandardFrameConstants::kFunctionOffset));
Ben Murdochc5610432016-08-08 18:44:38 +01005008 __ j(equal, &ok);
5009 __ Abort(kInvalidFrameForFastNewStrictArgumentsStub);
5010 __ bind(&ok);
Ben Murdoch097c5b22016-05-18 11:27:45 +01005011 }
5012
5013 // Check if we have an arguments adaptor frame below the function frame.
5014 Label arguments_adaptor, arguments_done;
5015 __ mov(ebx, Operand(edx, StandardFrameConstants::kCallerFPOffset));
Ben Murdochda12d292016-06-02 14:46:10 +01005016 __ cmp(Operand(ebx, CommonFrameConstants::kContextOrFrameTypeOffset),
Ben Murdoch097c5b22016-05-18 11:27:45 +01005017 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
5018 __ j(equal, &arguments_adaptor, Label::kNear);
5019 {
5020 __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
5021 __ mov(eax,
5022 FieldOperand(eax, SharedFunctionInfo::kFormalParameterCountOffset));
5023 __ lea(ebx,
5024 Operand(edx, eax, times_half_pointer_size,
5025 StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
5026 }
5027 __ jmp(&arguments_done, Label::kNear);
5028 __ bind(&arguments_adaptor);
5029 {
5030 __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
5031 __ lea(ebx,
5032 Operand(ebx, eax, times_half_pointer_size,
5033 StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
5034 }
5035 __ bind(&arguments_done);
5036
5037 // ----------- S t a t e -------------
5038 // -- eax : number of arguments (tagged)
5039 // -- ebx : pointer to the first argument
5040 // -- esi : context
5041 // -- esp[0] : return address
5042 // -----------------------------------
5043
5044 // Allocate space for the strict arguments object plus the backing store.
5045 Label allocate, done_allocate;
5046 __ lea(ecx,
5047 Operand(eax, times_half_pointer_size,
5048 JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize));
Ben Murdochc5610432016-08-08 18:44:38 +01005049 __ Allocate(ecx, edx, edi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
Ben Murdoch097c5b22016-05-18 11:27:45 +01005050 __ bind(&done_allocate);
5051
5052 // Setup the elements array in edx.
5053 __ mov(FieldOperand(edx, FixedArray::kMapOffset),
5054 isolate()->factory()->fixed_array_map());
5055 __ mov(FieldOperand(edx, FixedArray::kLengthOffset), eax);
5056 {
5057 Label loop, done_loop;
5058 __ Move(ecx, Smi::FromInt(0));
5059 __ bind(&loop);
5060 __ cmp(ecx, eax);
5061 __ j(equal, &done_loop, Label::kNear);
5062 __ mov(edi, Operand(ebx, 0 * kPointerSize));
5063 __ mov(FieldOperand(edx, ecx, times_half_pointer_size,
5064 FixedArray::kHeaderSize),
5065 edi);
5066 __ sub(ebx, Immediate(1 * kPointerSize));
5067 __ add(ecx, Immediate(Smi::FromInt(1)));
5068 __ jmp(&loop);
5069 __ bind(&done_loop);
5070 }
5071
5072 // Setup the rest parameter array in edi.
5073 __ lea(edi,
5074 Operand(edx, eax, times_half_pointer_size, FixedArray::kHeaderSize));
5075 __ LoadGlobalFunction(Context::STRICT_ARGUMENTS_MAP_INDEX, ecx);
5076 __ mov(FieldOperand(edi, JSStrictArgumentsObject::kMapOffset), ecx);
5077 __ mov(FieldOperand(edi, JSStrictArgumentsObject::kPropertiesOffset),
5078 isolate()->factory()->empty_fixed_array());
5079 __ mov(FieldOperand(edi, JSStrictArgumentsObject::kElementsOffset), edx);
5080 __ mov(FieldOperand(edi, JSStrictArgumentsObject::kLengthOffset), eax);
5081 STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
5082 __ mov(eax, edi);
5083 __ Ret();
5084
Ben Murdoch61f157c2016-09-16 13:49:30 +01005085 // Fall back to %AllocateInNewSpace (if not too big).
5086 Label too_big_for_new_space;
Ben Murdoch097c5b22016-05-18 11:27:45 +01005087 __ bind(&allocate);
Ben Murdoch61f157c2016-09-16 13:49:30 +01005088 __ cmp(ecx, Immediate(Page::kMaxRegularHeapObjectSize));
5089 __ j(greater, &too_big_for_new_space);
Ben Murdoch097c5b22016-05-18 11:27:45 +01005090 {
5091 FrameScope scope(masm, StackFrame::INTERNAL);
5092 __ SmiTag(ecx);
5093 __ Push(eax);
5094 __ Push(ebx);
5095 __ Push(ecx);
5096 __ CallRuntime(Runtime::kAllocateInNewSpace);
5097 __ mov(edx, eax);
5098 __ Pop(ebx);
5099 __ Pop(eax);
5100 }
5101 __ jmp(&done_allocate);
Ben Murdoch097c5b22016-05-18 11:27:45 +01005102
Ben Murdoch61f157c2016-09-16 13:49:30 +01005103 // Fall back to %NewStrictArguments.
5104 __ bind(&too_big_for_new_space);
5105 __ PopReturnAddressTo(ecx);
5106 // We reload the function from the caller frame due to register pressure
5107 // within this stub. This is the slow path, hence reloading is preferable.
5108 if (skip_stub_frame()) {
5109 // For Ignition we need to skip the handler/stub frame to reach the
5110 // JavaScript frame for the function.
5111 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
5112 __ Push(Operand(edx, StandardFrameConstants::kFunctionOffset));
5113 } else {
5114 __ Push(Operand(ebp, StandardFrameConstants::kFunctionOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005115 }
Ben Murdoch61f157c2016-09-16 13:49:30 +01005116 __ PushReturnAddressFrom(ecx);
5117 __ TailCallRuntime(Runtime::kNewStrictArguments);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005118}
5119
5120
5121void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
5122 Register context_reg = esi;
5123 Register slot_reg = ebx;
5124 Register value_reg = eax;
5125 Register cell_reg = edi;
5126 Register cell_details_reg = edx;
5127 Register cell_value_reg = ecx;
5128 Label fast_heapobject_case, fast_smi_case, slow_case;
5129
5130 if (FLAG_debug_code) {
5131 __ CompareRoot(value_reg, Heap::kTheHoleValueRootIndex);
5132 __ Check(not_equal, kUnexpectedValue);
5133 }
5134
5135 // Go up context chain to the script context.
5136 for (int i = 0; i < depth(); ++i) {
5137 __ mov(cell_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
5138 context_reg = cell_reg;
5139 }
5140
5141 // Load the PropertyCell at the specified slot.
5142 __ mov(cell_reg, ContextOperand(context_reg, slot_reg));
5143
5144 // Load PropertyDetails for the cell (actually only the cell_type and kind).
5145 __ mov(cell_details_reg,
5146 FieldOperand(cell_reg, PropertyCell::kDetailsOffset));
5147 __ SmiUntag(cell_details_reg);
5148 __ and_(cell_details_reg,
5149 Immediate(PropertyDetails::PropertyCellTypeField::kMask |
5150 PropertyDetails::KindField::kMask |
5151 PropertyDetails::kAttributesReadOnlyMask));
5152
5153 // Check if PropertyCell holds mutable data.
5154 Label not_mutable_data;
5155 __ cmp(cell_details_reg,
5156 Immediate(PropertyDetails::PropertyCellTypeField::encode(
5157 PropertyCellType::kMutable) |
5158 PropertyDetails::KindField::encode(kData)));
5159 __ j(not_equal, &not_mutable_data);
5160 __ JumpIfSmi(value_reg, &fast_smi_case);
5161 __ bind(&fast_heapobject_case);
5162 __ mov(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
5163 __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg,
5164 cell_details_reg, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
5165 OMIT_SMI_CHECK);
5166 // RecordWriteField clobbers the value register, so we need to reload.
5167 __ mov(value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
5168 __ Ret();
5169 __ bind(&not_mutable_data);
5170
5171 // Check if PropertyCell value matches the new value (relevant for Constant,
5172 // ConstantType and Undefined cells).
5173 Label not_same_value;
5174 __ mov(cell_value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
5175 __ cmp(cell_value_reg, value_reg);
5176 __ j(not_equal, &not_same_value,
5177 FLAG_debug_code ? Label::kFar : Label::kNear);
5178 // Make sure the PropertyCell is not marked READ_ONLY.
5179 __ test(cell_details_reg,
5180 Immediate(PropertyDetails::kAttributesReadOnlyMask));
5181 __ j(not_zero, &slow_case);
5182 if (FLAG_debug_code) {
5183 Label done;
5184 // This can only be true for Constant, ConstantType and Undefined cells,
5185 // because we never store the_hole via this stub.
5186 __ cmp(cell_details_reg,
5187 Immediate(PropertyDetails::PropertyCellTypeField::encode(
5188 PropertyCellType::kConstant) |
5189 PropertyDetails::KindField::encode(kData)));
5190 __ j(equal, &done);
5191 __ cmp(cell_details_reg,
5192 Immediate(PropertyDetails::PropertyCellTypeField::encode(
5193 PropertyCellType::kConstantType) |
5194 PropertyDetails::KindField::encode(kData)));
5195 __ j(equal, &done);
5196 __ cmp(cell_details_reg,
5197 Immediate(PropertyDetails::PropertyCellTypeField::encode(
5198 PropertyCellType::kUndefined) |
5199 PropertyDetails::KindField::encode(kData)));
5200 __ Check(equal, kUnexpectedValue);
5201 __ bind(&done);
5202 }
5203 __ Ret();
5204 __ bind(&not_same_value);
5205
5206 // Check if PropertyCell contains data with constant type (and is not
5207 // READ_ONLY).
5208 __ cmp(cell_details_reg,
5209 Immediate(PropertyDetails::PropertyCellTypeField::encode(
5210 PropertyCellType::kConstantType) |
5211 PropertyDetails::KindField::encode(kData)));
5212 __ j(not_equal, &slow_case, Label::kNear);
5213
5214 // Now either both old and new values must be SMIs or both must be heap
5215 // objects with same map.
5216 Label value_is_heap_object;
5217 __ JumpIfNotSmi(value_reg, &value_is_heap_object, Label::kNear);
5218 __ JumpIfNotSmi(cell_value_reg, &slow_case, Label::kNear);
5219 // Old and new values are SMIs, no need for a write barrier here.
5220 __ bind(&fast_smi_case);
5221 __ mov(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
5222 __ Ret();
5223 __ bind(&value_is_heap_object);
5224 __ JumpIfSmi(cell_value_reg, &slow_case, Label::kNear);
5225 Register cell_value_map_reg = cell_value_reg;
5226 __ mov(cell_value_map_reg,
5227 FieldOperand(cell_value_reg, HeapObject::kMapOffset));
5228 __ cmp(cell_value_map_reg, FieldOperand(value_reg, HeapObject::kMapOffset));
5229 __ j(equal, &fast_heapobject_case);
5230
5231 // Fallback to the runtime.
5232 __ bind(&slow_case);
5233 __ SmiTag(slot_reg);
5234 __ Pop(cell_reg); // Pop return address.
5235 __ Push(slot_reg);
5236 __ Push(value_reg);
5237 __ Push(cell_reg); // Push return address.
5238 __ TailCallRuntime(is_strict(language_mode())
5239 ? Runtime::kStoreGlobalViaContext_Strict
5240 : Runtime::kStoreGlobalViaContext_Sloppy);
5241}
5242
5243
5244// Generates an Operand for saving parameters after PrepareCallApiFunction.
5245static Operand ApiParameterOperand(int index) {
5246 return Operand(esp, index * kPointerSize);
5247}
5248
5249
5250// Prepares stack to put arguments (aligns and so on). Reserves
5251// space for return value if needed (assumes the return value is a handle).
5252// Arguments must be stored in ApiParameterOperand(0), ApiParameterOperand(1)
5253// etc. Saves context (esi). If space was reserved for return value then
5254// stores the pointer to the reserved slot into esi.
5255static void PrepareCallApiFunction(MacroAssembler* masm, int argc) {
5256 __ EnterApiExitFrame(argc);
5257 if (__ emit_debug_code()) {
5258 __ mov(esi, Immediate(bit_cast<int32_t>(kZapValue)));
5259 }
5260}
5261
5262
5263// Calls an API function. Allocates HandleScope, extracts returned value
5264// from handle and propagates exceptions. Clobbers ebx, edi and
5265// caller-save registers. Restores context. On return removes
5266// stack_space * kPointerSize (GCed).
5267static void CallApiFunctionAndReturn(MacroAssembler* masm,
5268 Register function_address,
5269 ExternalReference thunk_ref,
5270 Operand thunk_last_arg, int stack_space,
5271 Operand* stack_space_operand,
5272 Operand return_value_operand,
5273 Operand* context_restore_operand) {
5274 Isolate* isolate = masm->isolate();
5275
5276 ExternalReference next_address =
5277 ExternalReference::handle_scope_next_address(isolate);
5278 ExternalReference limit_address =
5279 ExternalReference::handle_scope_limit_address(isolate);
5280 ExternalReference level_address =
5281 ExternalReference::handle_scope_level_address(isolate);
5282
5283 DCHECK(edx.is(function_address));
5284 // Allocate HandleScope in callee-save registers.
5285 __ mov(ebx, Operand::StaticVariable(next_address));
5286 __ mov(edi, Operand::StaticVariable(limit_address));
5287 __ add(Operand::StaticVariable(level_address), Immediate(1));
5288
5289 if (FLAG_log_timer_events) {
5290 FrameScope frame(masm, StackFrame::MANUAL);
5291 __ PushSafepointRegisters();
5292 __ PrepareCallCFunction(1, eax);
5293 __ mov(Operand(esp, 0),
5294 Immediate(ExternalReference::isolate_address(isolate)));
5295 __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
5296 1);
5297 __ PopSafepointRegisters();
5298 }
5299
5300
5301 Label profiler_disabled;
5302 Label end_profiler_check;
5303 __ mov(eax, Immediate(ExternalReference::is_profiling_address(isolate)));
Ben Murdochda12d292016-06-02 14:46:10 +01005304 __ cmpb(Operand(eax, 0), Immediate(0));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005305 __ j(zero, &profiler_disabled);
5306
5307 // Additional parameter is the address of the actual getter function.
5308 __ mov(thunk_last_arg, function_address);
5309 // Call the api function.
5310 __ mov(eax, Immediate(thunk_ref));
5311 __ call(eax);
5312 __ jmp(&end_profiler_check);
5313
5314 __ bind(&profiler_disabled);
5315 // Call the api function.
5316 __ call(function_address);
5317 __ bind(&end_profiler_check);
5318
5319 if (FLAG_log_timer_events) {
5320 FrameScope frame(masm, StackFrame::MANUAL);
5321 __ PushSafepointRegisters();
5322 __ PrepareCallCFunction(1, eax);
5323 __ mov(Operand(esp, 0),
5324 Immediate(ExternalReference::isolate_address(isolate)));
5325 __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
5326 1);
5327 __ PopSafepointRegisters();
5328 }
5329
5330 Label prologue;
5331 // Load the value from ReturnValue
5332 __ mov(eax, return_value_operand);
5333
5334 Label promote_scheduled_exception;
5335 Label delete_allocated_handles;
5336 Label leave_exit_frame;
5337
5338 __ bind(&prologue);
5339 // No more valid handles (the result handle was the last one). Restore
5340 // previous handle scope.
5341 __ mov(Operand::StaticVariable(next_address), ebx);
5342 __ sub(Operand::StaticVariable(level_address), Immediate(1));
5343 __ Assert(above_equal, kInvalidHandleScopeLevel);
5344 __ cmp(edi, Operand::StaticVariable(limit_address));
5345 __ j(not_equal, &delete_allocated_handles);
5346
5347 // Leave the API exit frame.
5348 __ bind(&leave_exit_frame);
5349 bool restore_context = context_restore_operand != NULL;
5350 if (restore_context) {
5351 __ mov(esi, *context_restore_operand);
5352 }
5353 if (stack_space_operand != nullptr) {
5354 __ mov(ebx, *stack_space_operand);
5355 }
5356 __ LeaveApiExitFrame(!restore_context);
5357
5358 // Check if the function scheduled an exception.
5359 ExternalReference scheduled_exception_address =
5360 ExternalReference::scheduled_exception_address(isolate);
5361 __ cmp(Operand::StaticVariable(scheduled_exception_address),
5362 Immediate(isolate->factory()->the_hole_value()));
5363 __ j(not_equal, &promote_scheduled_exception);
5364
5365#if DEBUG
5366 // Check if the function returned a valid JavaScript value.
5367 Label ok;
5368 Register return_value = eax;
5369 Register map = ecx;
5370
5371 __ JumpIfSmi(return_value, &ok, Label::kNear);
5372 __ mov(map, FieldOperand(return_value, HeapObject::kMapOffset));
5373
5374 __ CmpInstanceType(map, LAST_NAME_TYPE);
5375 __ j(below_equal, &ok, Label::kNear);
5376
5377 __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
5378 __ j(above_equal, &ok, Label::kNear);
5379
5380 __ cmp(map, isolate->factory()->heap_number_map());
5381 __ j(equal, &ok, Label::kNear);
5382
5383 __ cmp(return_value, isolate->factory()->undefined_value());
5384 __ j(equal, &ok, Label::kNear);
5385
5386 __ cmp(return_value, isolate->factory()->true_value());
5387 __ j(equal, &ok, Label::kNear);
5388
5389 __ cmp(return_value, isolate->factory()->false_value());
5390 __ j(equal, &ok, Label::kNear);
5391
5392 __ cmp(return_value, isolate->factory()->null_value());
5393 __ j(equal, &ok, Label::kNear);
5394
5395 __ Abort(kAPICallReturnedInvalidObject);
5396
5397 __ bind(&ok);
5398#endif
5399
5400 if (stack_space_operand != nullptr) {
5401 DCHECK_EQ(0, stack_space);
5402 __ pop(ecx);
5403 __ add(esp, ebx);
5404 __ jmp(ecx);
5405 } else {
5406 __ ret(stack_space * kPointerSize);
5407 }
5408
5409 // Re-throw by promoting a scheduled exception.
5410 __ bind(&promote_scheduled_exception);
5411 __ TailCallRuntime(Runtime::kPromoteScheduledException);
5412
5413 // HandleScope limit has changed. Delete allocated extensions.
5414 ExternalReference delete_extensions =
5415 ExternalReference::delete_handle_scope_extensions(isolate);
5416 __ bind(&delete_allocated_handles);
5417 __ mov(Operand::StaticVariable(limit_address), edi);
5418 __ mov(edi, eax);
5419 __ mov(Operand(esp, 0),
5420 Immediate(ExternalReference::isolate_address(isolate)));
5421 __ mov(eax, Immediate(delete_extensions));
5422 __ call(eax);
5423 __ mov(eax, edi);
5424 __ jmp(&leave_exit_frame);
5425}
5426
// Emits the stub that invokes a C++ API function callback (v8::FunctionCallback).
// It materializes the FunctionCallbackArguments (FCA) implicit-args array on the
// JS stack, sets up a v8::FunctionCallbackInfo in non-GCed argument space, and
// tail-calls through CallApiFunctionAndReturn, which handles HandleScope
// bookkeeping, profiler thunking, scheduled exceptions and the final return.
void CallApiCallbackStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- edi                 : callee
  //  -- ebx                 : call_data
  //  -- ecx                 : holder
  //  -- edx                 : api_function_address
  //  -- esi                 : context
  //  --
  //  -- esp[0]              : return address
  //  -- esp[4]              : last argument
  //  -- ...
  //  -- esp[argc * 4]       : first argument
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  Register callee = edi;
  Register call_data = ebx;
  Register holder = ecx;
  Register api_function_address = edx;
  Register context = esi;
  Register return_address = eax;

  typedef FunctionCallbackArguments FCA;

  // The pushes below must build the FCA array so that slot kHolderIndex (0)
  // ends up at the lowest address; these asserts pin the expected layout.
  STATIC_ASSERT(FCA::kContextSaveIndex == 6);
  STATIC_ASSERT(FCA::kCalleeIndex == 5);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kHolderIndex == 0);
  STATIC_ASSERT(FCA::kNewTargetIndex == 7);
  STATIC_ASSERT(FCA::kArgsLength == 8);

  // Pop the return address so the FCA slots can be pushed beneath it; it is
  // re-pushed once the array is complete.
  __ pop(return_address);

  // new target (slot 7) — always undefined here.
  __ PushRoot(Heap::kUndefinedValueRootIndex);

  // context save (slot 6).
  __ push(context);

  // callee (slot 5).
  __ push(callee);

  // call data (slot 4).
  __ push(call_data);

  Register scratch = call_data;
  if (!call_data_undefined()) {
    // return value (slot 3)
    __ push(Immediate(masm->isolate()->factory()->undefined_value()));
    // return value default (slot 2)
    __ push(Immediate(masm->isolate()->factory()->undefined_value()));
  } else {
    // call_data is statically known to be undefined, so the register already
    // holds the value we want in both ReturnValue slots — reuse it instead of
    // loading the undefined immediate twice.
    // return value (slot 3)
    __ push(scratch);
    // return value default (slot 2)
    __ push(scratch);
  }
  // isolate (slot 1) — raw pointer pushed as an untagged immediate.
  __ push(Immediate(reinterpret_cast<int>(masm->isolate())));
  // holder (slot 0).
  __ push(holder);

  // scratch now points at FCA slot 0 (the base of implicit_args_).
  __ mov(scratch, esp);

  // push return address back on top of the FCA array.
  __ push(return_address);

  if (!is_lazy()) {
    // load context from callee
    __ mov(context, FieldOperand(callee, JSFunction::kContextOffset));
  }

  // API function gets reference to the v8::Arguments. If CPU profiler
  // is enabled wrapper function will be called and we need to pass
  // address of the callback as additional parameter, always allocate
  // space for it.
  const int kApiArgc = 1 + 1;

  // Allocate the v8::Arguments structure in the arguments' space since
  // it's not controlled by GC.
  const int kApiStackSpace = 3;

  PrepareCallApiFunction(masm, kApiArgc + kApiStackSpace);

  // FunctionCallbackInfo::implicit_args_.
  __ mov(ApiParameterOperand(2), scratch);
  // Advance past the implicit args to the JS arguments; the -1 lands scratch
  // on the last implicit slot + argc, i.e. the values_ base — presumably the
  // first JS argument given the reverse stack layout (TODO confirm against
  // FunctionCallbackArguments).
  __ add(scratch, Immediate((argc() + FCA::kArgsLength - 1) * kPointerSize));
  // FunctionCallbackInfo::values_.
  __ mov(ApiParameterOperand(3), scratch);
  // FunctionCallbackInfo::length_.
  __ Move(ApiParameterOperand(4), Immediate(argc()));

  // v8::InvocationCallback's argument: address of the FunctionCallbackInfo
  // just built in the C argument area.
  __ lea(scratch, ApiParameterOperand(2));
  __ mov(ApiParameterOperand(0), scratch);

  ExternalReference thunk_ref =
      ExternalReference::invoke_function_callback(masm->isolate());

  // +2 skips the saved ebp and return address of the exit frame.
  Operand context_restore_operand(ebp,
                                  (2 + FCA::kContextSaveIndex) * kPointerSize);
  // Stores return the first js argument
  int return_value_offset = 0;
  if (is_store()) {
    return_value_offset = 2 + FCA::kArgsLength;
  } else {
    return_value_offset = 2 + FCA::kReturnValueOffset;
  }
  Operand return_value_operand(ebp, return_value_offset * kPointerSize);
  int stack_space = 0;
  Operand length_operand = ApiParameterOperand(4);
  Operand* stack_space_operand = &length_operand;
  // argc is a compile-time constant for this stub, so the fixed-size unwind
  // path is selected by resetting stack_space_operand to nullptr; the
  // length-operand variant above is left unused.
  stack_space = argc() + FCA::kArgsLength + 1;
  stack_space_operand = nullptr;
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
                           ApiParameterOperand(1), stack_space,
                           stack_space_operand, return_value_operand,
                           &context_restore_operand);
}
5549
5550
// Emits the stub that invokes a C++ accessor getter callback
// (v8::AccessorNameGetterCallback). It builds the PropertyCallbackArguments
// array plus the property-name handle on the JS stack, constructs a
// v8::PropertyCallbackInfo in non-GCed argument space, loads the native
// getter address out of the AccessorInfo, and dispatches through
// CallApiFunctionAndReturn.
void CallApiGetterStub::Generate(MacroAssembler* masm) {
  // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
  // name below the exit frame to make GC aware of them.
  STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);

  Register receiver = ApiGetterDescriptor::ReceiverRegister();
  Register holder = ApiGetterDescriptor::HolderRegister();
  Register callback = ApiGetterDescriptor::CallbackRegister();
  Register scratch = ebx;
  DCHECK(!AreAliased(receiver, holder, callback, scratch));

  // Pushes run from kThisIndex (6) down to kShouldThrowOnErrorIndex (0), so
  // the slot with index 0 ends up at the lowest address; the name handle goes
  // below the args_ array, then the return address is restored on top.
  __ pop(scratch);  // Pop return address to extend the frame.
  __ push(receiver);
  __ push(FieldOperand(callback, AccessorInfo::kDataOffset));
  __ PushRoot(Heap::kUndefinedValueRootIndex);  // ReturnValue
  // ReturnValue default value
  __ PushRoot(Heap::kUndefinedValueRootIndex);
  __ push(Immediate(ExternalReference::isolate_address(isolate())));
  __ push(holder);
  __ push(Immediate(Smi::FromInt(0)));  // should_throw_on_error -> false
  __ push(FieldOperand(callback, AccessorInfo::kNameOffset));
  __ push(scratch);  // Restore return address.

  // v8::PropertyCallbackInfo::args_ array and name handle.
  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;

  // Allocate v8::PropertyCallbackInfo object, arguments for callback and
  // space for optional callback address parameter (in case CPU profiler is
  // active) in non-GCed stack space.
  const int kApiArgc = 3 + 1;

  // Load address of v8::PropertyAccessorInfo::args_ array.
  // (+2 skips the return address just pushed and the name handle.)
  __ lea(scratch, Operand(esp, 2 * kPointerSize));

  PrepareCallApiFunction(masm, kApiArgc);
  // Create v8::PropertyCallbackInfo object on the stack and initialize
  // it's args_ field.
  Operand info_object = ApiParameterOperand(3);
  __ mov(info_object, scratch);

  // Name as handle — it sits one word below the args_ array.
  __ sub(scratch, Immediate(kPointerSize));
  __ mov(ApiParameterOperand(0), scratch);
  // Arguments pointer: address of the PropertyCallbackInfo itself.
  __ lea(scratch, info_object);
  __ mov(ApiParameterOperand(1), scratch);
  // Reserve space for optional callback address parameter.
  Operand thunk_last_arg = ApiParameterOperand(2);

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback(isolate());

  // Extract the native getter: AccessorInfo::js_getter holds a Foreign
  // wrapping the raw C function address.
  __ mov(scratch, FieldOperand(callback, AccessorInfo::kJsGetterOffset));
  Register function_address = edx;
  __ mov(function_address,
         FieldOperand(scratch, Foreign::kForeignAddressOffset));
  // +3 is to skip prolog, return address and name handle.
  Operand return_value_operand(
      ebp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
  CallApiFunctionAndReturn(masm, function_address, thunk_ref, thunk_last_arg,
                           kStackUnwindSpace, nullptr, return_value_operand,
                           NULL);
}
5621
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005622#undef __
5623
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005624} // namespace internal
5625} // namespace v8
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005626
5627#endif // V8_TARGET_ARCH_IA32