blob: 6e597e2814cfd57665c55eb403871305350b1848 [file] [log] [blame]
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001// Copyright 2012 the V8 project authors. All rights reserved.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005#if V8_TARGET_ARCH_IA32
Kristian Monsen80d68ea2010-09-08 11:05:35 +01006
Ben Murdochb8a8cc12014-11-26 15:28:44 +00007#include "src/base/bits.h"
8#include "src/bootstrapper.h"
9#include "src/code-stubs.h"
10#include "src/codegen.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000011#include "src/ia32/code-stubs-ia32.h"
12#include "src/ia32/frames-ia32.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000013#include "src/ic/handler-compiler.h"
14#include "src/ic/ic.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000015#include "src/ic/stub-cache.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000016#include "src/isolate.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000017#include "src/regexp/jsregexp.h"
18#include "src/regexp/regexp-macro-assembler.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -040019#include "src/runtime/runtime.h"
Kristian Monsen80d68ea2010-09-08 11:05:35 +010020
21namespace v8 {
22namespace internal {
23
Ben Murdochb8a8cc12014-11-26 15:28:44 +000024
25static void InitializeArrayConstructorDescriptor(
26 Isolate* isolate, CodeStubDescriptor* descriptor,
27 int constant_stack_parameter_count) {
28 // register state
29 // eax -- number of arguments
30 // edi -- function
31 // ebx -- allocation site with elements kind
32 Address deopt_handler = Runtime::FunctionForId(
33 Runtime::kArrayConstructor)->entry;
34
35 if (constant_stack_parameter_count == 0) {
36 descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
37 JS_FUNCTION_STUB_MODE);
38 } else {
39 descriptor->Initialize(eax, deopt_handler, constant_stack_parameter_count,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000040 JS_FUNCTION_STUB_MODE);
Ben Murdochb8a8cc12014-11-26 15:28:44 +000041 }
42}
43
44
45static void InitializeInternalArrayConstructorDescriptor(
46 Isolate* isolate, CodeStubDescriptor* descriptor,
47 int constant_stack_parameter_count) {
48 // register state
49 // eax -- number of arguments
50 // edi -- constructor function
51 Address deopt_handler = Runtime::FunctionForId(
52 Runtime::kInternalArrayConstructor)->entry;
53
54 if (constant_stack_parameter_count == 0) {
55 descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
56 JS_FUNCTION_STUB_MODE);
57 } else {
58 descriptor->Initialize(eax, deopt_handler, constant_stack_parameter_count,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000059 JS_FUNCTION_STUB_MODE);
Ben Murdochb8a8cc12014-11-26 15:28:44 +000060 }
61}
62
63
// Descriptor for the zero-argument Array constructor stub.
void ArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 0);
}
68
69
// Descriptor for the single-argument Array constructor stub.
void ArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 1);
}
74
75
// Descriptor for the N-argument Array constructor stub.
// -1 signals a variable number of stack parameters.
void ArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, -1);
}
80
81
// Descriptor for the zero-argument InternalArray constructor stub.
void InternalArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 0);
}
86
87
// Descriptor for the single-argument InternalArray constructor stub.
void InternalArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 1);
}
92
93
// Descriptor for the N-argument InternalArray constructor stub.
// -1 signals a variable number of stack parameters.
void InternalArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, -1);
}
98
99
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100100#define __ ACCESS_MASM(masm)
Steve Block1e0659c2011-05-24 12:43:12 +0100101
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000102
103void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
104 ExternalReference miss) {
105 // Update the static counter each time a new code stub is generated.
106 isolate()->counters()->code_stubs()->Increment();
107
108 CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000109 int param_count = descriptor.GetRegisterParameterCount();
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000110 {
111 // Call the runtime system in a fresh internal frame.
112 FrameScope scope(masm, StackFrame::INTERNAL);
113 DCHECK(param_count == 0 ||
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000114 eax.is(descriptor.GetRegisterParameter(param_count - 1)));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000115 // Push arguments
116 for (int i = 0; i < param_count; ++i) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000117 __ push(descriptor.GetRegisterParameter(i));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000118 }
119 __ CallExternalReference(miss, param_count);
120 }
121
Steve Block1e0659c2011-05-24 12:43:12 +0100122 __ ret(0);
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000123}
124
125
// Emits the store-buffer-overflow handler: saves all GP registers (and, when
// save_doubles() is set, all XMM registers) around a C call to the
// store-buffer processing function, then restores them and returns.
void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  __ pushad();
  if (save_doubles()) {
    // Spill every XMM register to a stack slot.
    __ sub(esp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      __ movsd(Operand(esp, i * kDoubleSize), reg);
    }
  }
  const int argument_count = 1;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count, ecx);
  // Single argument: the isolate address.
  __ mov(Operand(esp, 0 * kPointerSize),
         Immediate(ExternalReference::isolate_address(isolate())));
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  if (save_doubles()) {
    // Reload every XMM register from its stack slot.
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      __ movsd(reg, Operand(esp, i * kDoubleSize));
    }
    __ add(esp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
  }
  __ popad();
  __ ret(0);
}
157
158
// Static helpers that emit ia32 code for loading and type-checking number
// operands (smis or HeapNumbers) into x87 or SSE2 registers. All members emit
// code via the passed MacroAssembler; none run at stub-generation time.
class FloatingPointHelper : public AllStatic {
 public:
  enum ArgLocation {
    ARGS_ON_STACK,
    ARGS_IN_REGISTERS
  };

  // Code pattern for loading a floating point value. Input value must
  // be either a smi or a heap number object (fp value). Requirements:
  // operand in register number. Returns operand as floating point number
  // on FPU stack.
  static void LoadFloatOperand(MacroAssembler* masm, Register number);

  // Test if operands are smi or number objects (fp). Requirements:
  // operand_1 in eax, operand_2 in edx; falls through on float
  // operands, jumps to the non_float label otherwise.
  static void CheckFloatOperands(MacroAssembler* masm,
                                 Label* non_float,
                                 Register scratch);

  // Test if operands are numbers (smi or HeapNumber objects), and load
  // them into xmm0 and xmm1 if they are. Jump to label not_numbers if
  // either operand is not a number. Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm, Label* not_numbers);
};
185
186
// Emits a truncating double->int32 conversion (DCHECK(is_truncating())) for
// the IEEE double stored at [source() + offset()], leaving the result in
// destination(). Uses SSE3 fisttp when available; otherwise extracts the
// result manually from the mantissa bits via shifts. Never calls the runtime.
void DoubleToIStub::Generate(MacroAssembler* masm) {
  Register input_reg = this->source();
  Register final_result_reg = this->destination();
  DCHECK(is_truncating());

  Label check_negative, process_64_bits, done, done_no_stash;

  int double_offset = offset();

  // Account for return address and saved regs if input is esp.
  if (input_reg.is(esp)) double_offset += 3 * kPointerSize;

  // The double's low word (mantissa) and high word (sign/exponent/mantissa).
  MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
  MemOperand exponent_operand(MemOperand(input_reg,
                                         double_offset + kDoubleSize / 2));

  // Pick the first of ebx/edx/edi that is neither the input nor the result.
  Register scratch1;
  {
    Register scratch_candidates[3] = { ebx, edx, edi };
    for (int i = 0; i < 3; i++) {
      scratch1 = scratch_candidates[i];
      if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
    }
  }
  // Since we must use ecx for shifts below, use some other register (eax)
  // to calculate the result if ecx is the requested return register.
  Register result_reg = final_result_reg.is(ecx) ? eax : final_result_reg;
  // Save ecx if it isn't the return register and therefore volatile, or if it
  // is the return register, then save the temp register we use in its stead for
  // the result.
  Register save_reg = final_result_reg.is(ecx) ? eax : ecx;
  __ push(scratch1);
  __ push(save_reg);

  // If the input is not esp-relative, a copy of the exponent word is kept on
  // the stack so it survives register clobbering below.
  bool stash_exponent_copy = !input_reg.is(esp);
  __ mov(scratch1, mantissa_operand);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    // Load x87 register with heap number.
    __ fld_d(mantissa_operand);
  }
  __ mov(ecx, exponent_operand);
  if (stash_exponent_copy) __ push(ecx);

  // Compute the unbiased exponent and dispatch on its magnitude.
  __ and_(ecx, HeapNumber::kExponentMask);
  __ shr(ecx, HeapNumber::kExponentShift);
  __ lea(result_reg, MemOperand(ecx, -HeapNumber::kExponentBias));
  __ cmp(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits);

  // Result is entirely in lower 32-bits of mantissa
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  if (CpuFeatures::IsSupported(SSE3)) {
    __ fstp(0);  // Discard the x87 copy; not needed on this path.
  }
  __ sub(ecx, Immediate(delta));
  __ xor_(result_reg, result_reg);
  __ cmp(ecx, Immediate(31));
  __ j(above, &done);  // Shift of 32+ bits: result is 0.
  __ shl_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    if (stash_exponent_copy) {
      // Already a copy of the exponent on the stack, overwrite it.
      STATIC_ASSERT(kDoubleSize == 2 * kPointerSize);
      __ sub(esp, Immediate(kDoubleSize / 2));
    } else {
      // Reserve space for 64 bit answer.
      __ sub(esp, Immediate(kDoubleSize));  // Nolint.
    }
    // Do conversion, which cannot fail because we checked the exponent.
    __ fisttp_d(Operand(esp, 0));
    __ mov(result_reg, Operand(esp, 0));  // Load low word of answer as result
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&done_no_stash);
  } else {
    // Result must be extracted from shifted 32-bit mantissa
    __ sub(ecx, Immediate(delta));
    __ neg(ecx);
    if (stash_exponent_copy) {
      __ mov(result_reg, MemOperand(esp, 0));
    } else {
      __ mov(result_reg, exponent_operand);
    }
    // Reassemble the significand including the implicit hidden bit.
    __ and_(result_reg,
            Immediate(static_cast<uint32_t>(Double::kSignificandMask >> 32)));
    __ add(result_reg,
           Immediate(static_cast<uint32_t>(Double::kHiddenBit >> 32)));
    __ shrd(result_reg, scratch1);
    __ shr_cl(result_reg);
    __ test(ecx, Immediate(32));
    __ cmov(not_equal, scratch1, result_reg);
  }

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ mov(result_reg, scratch1);
  __ neg(result_reg);
  if (stash_exponent_copy) {
    __ cmp(MemOperand(esp, 0), Immediate(0));
  } else {
    __ cmp(exponent_operand, Immediate(0));
  }
  __ cmov(greater, result_reg, scratch1);

  // Restore registers
  __ bind(&done);
  if (stash_exponent_copy) {
    __ add(esp, Immediate(kDoubleSize / 2));
  }
  __ bind(&done_no_stash);
  if (!final_result_reg.is(result_reg)) {
    DCHECK(final_result_reg.is(ecx));
    __ mov(final_result_reg, result_reg);
  }
  __ pop(save_reg);
  __ pop(scratch1);
  __ ret(0);
}
309
310
// Emits code that pushes the value in |number| (smi or HeapNumber) onto the
// x87 FPU stack as a double. |number| itself is left unchanged.
void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
                                           Register number) {
  Label load_smi, done;

  __ JumpIfSmi(number, &load_smi, Label::kNear);
  // HeapNumber: load the double payload directly.
  __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
  __ jmp(&done, Label::kNear);

  __ bind(&load_smi);
  // Smi: untag, convert via an integer load from the stack, then retag.
  __ SmiUntag(number);
  __ push(number);
  __ fild_s(Operand(esp, 0));
  __ pop(number);

  __ bind(&done);
}
327
328
// Emits code that loads the numbers in edx and eax (each a smi or HeapNumber)
// into xmm0 and xmm1 respectively, or jumps to |not_numbers| if either is
// neither. edx and eax are left unchanged (smis are untagged and retagged).
void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
                                           Label* not_numbers) {
  Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
  // Load operand in edx into xmm0, or branch to not_numbers.
  __ JumpIfSmi(edx, &load_smi_edx, Label::kNear);
  Factory* factory = masm->isolate()->factory();
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset), factory->heap_number_map());
  __ j(not_equal, not_numbers);  // Argument in edx is not a number.
  __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
  __ bind(&load_eax);
  // Load operand in eax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(eax, &load_smi_eax, Label::kNear);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset), factory->heap_number_map());
  __ j(equal, &load_float_eax, Label::kNear);
  __ jmp(not_numbers);  // Argument in eax is not a number.
  __ bind(&load_smi_edx);
  __ SmiUntag(edx);  // Untag smi before converting to float.
  __ Cvtsi2sd(xmm0, edx);
  __ SmiTag(edx);  // Retag smi for heap number overwriting test.
  __ jmp(&load_eax);
  __ bind(&load_smi_eax);
  __ SmiUntag(eax);  // Untag smi before converting to float.
  __ Cvtsi2sd(xmm1, eax);
  __ SmiTag(eax);  // Retag smi for heap number overwriting test.
  __ jmp(&done, Label::kNear);
  __ bind(&load_float_eax);
  __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
  __ bind(&done);
}
358
359
// Emits code that falls through iff both edx and eax hold numbers (smi or
// HeapNumber); otherwise jumps to |non_float|. |scratch| is clobbered with
// map loads.
void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
                                             Label* non_float,
                                             Register scratch) {
  Label test_other, done;
  // Test if both operands are floats or smi -> scratch=k_is_float;
  // Otherwise scratch = k_not_float.
  __ JumpIfSmi(edx, &test_other, Label::kNear);
  __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
  Factory* factory = masm->isolate()->factory();
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // argument in edx is not a number -> NaN

  __ bind(&test_other);
  __ JumpIfSmi(eax, &done, Label::kNear);
  __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // argument in eax is not a number -> NaN

  // Fall-through: Both operands are numbers.
  __ bind(&done);
}
381
382
// Emits the Math.pow stub. Inputs arrive per exponent_type(): ON_STACK
// (tagged base/exponent on the stack), TAGGED (exponent in the descriptor
// register), or INTEGER. Fast paths: +/-0.5 exponents via sqrtsd (ON_STACK
// only, with the -Infinity special cases of ES5.1 15.8.2.13), general double
// exponents via x87 fyl2x/f2xm1/fscale, and integer exponents via
// square-and-multiply. Falls back to the runtime / C power function when the
// fast paths bail out.
void MathPowStub::Generate(MacroAssembler* masm) {
  Factory* factory = isolate()->factory();
  const Register exponent = MathPowTaggedDescriptor::exponent();
  DCHECK(exponent.is(eax));
  const Register base = edx;
  const Register scratch = ecx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ mov(scratch, Immediate(1));
  __ Cvtsi2sd(double_result, scratch);

  if (exponent_type() == ON_STACK) {
    Label base_is_smi, unpack_exponent;
    // The exponent and base are supplied as arguments on the stack.
    // This can only happen if the stub is called from non-optimized code.
    // Load input parameters from stack.
    __ mov(base, Operand(esp, 2 * kPointerSize));
    __ mov(exponent, Operand(esp, 1 * kPointerSize));

    __ JumpIfSmi(base, &base_is_smi, Label::kNear);
    __ cmp(FieldOperand(base, HeapObject::kMapOffset),
           factory->heap_number_map());
    __ j(not_equal, &call_runtime);

    __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset));
    __ jmp(&unpack_exponent, Label::kNear);

    __ bind(&base_is_smi);
    __ SmiUntag(base);
    __ Cvtsi2sd(double_base, base);

    __ bind(&unpack_exponent);
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiUntag(exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ cmp(FieldOperand(exponent, HeapObject::kMapOffset),
           factory->heap_number_map());
    __ j(not_equal, &call_runtime);
    __ movsd(double_exponent,
             FieldOperand(exponent, HeapNumber::kValueOffset));
  } else if (exponent_type() == TAGGED) {
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiUntag(exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ movsd(double_exponent,
             FieldOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type() != INTEGER) {
    Label fast_power, try_arithmetic_simplification;
    // If the exponent is an exact integer, use the integer path instead.
    __ DoubleToI(exponent, double_exponent, double_scratch,
                 TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification,
                 &try_arithmetic_simplification,
                 &try_arithmetic_simplification);
    __ jmp(&int_exponent);

    __ bind(&try_arithmetic_simplification);
    // Skip to runtime if possibly NaN (indicated by the indefinite integer).
    __ cvttsd2si(exponent, Operand(double_exponent));
    __ cmp(exponent, Immediate(0x1));
    __ j(overflow, &call_runtime);

    if (exponent_type() == ON_STACK) {
      // Detect square root case. Crankshaft detects constant +/-0.5 at
      // compile time and uses DoMathPowHalf instead. We then skip this check
      // for non-constant cases of +/-0.5 as these hardly occur.
      Label continue_sqrt, continue_rsqrt, not_plus_half;
      // Test for 0.5.
      // Load double_scratch with 0.5.
      __ mov(scratch, Immediate(0x3F000000u));
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &not_plus_half, Label::kNear);

      // Calculates square root of base. Check for the special case of
      // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
      // According to IEEE-754, single-precision -Infinity has the highest
      // 9 bits set and the lowest 23 bits cleared.
      __ mov(scratch, 0xFF800000u);
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      __ ucomisd(double_base, double_scratch);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal. However, it also sets the carry flag.
      __ j(not_equal, &continue_sqrt, Label::kNear);
      __ j(carry, &continue_sqrt, Label::kNear);

      // Set result to Infinity in the special case.
      __ xorps(double_result, double_result);
      __ subsd(double_result, double_scratch);
      __ jmp(&done);

      __ bind(&continue_sqrt);
      // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
      __ xorps(double_scratch, double_scratch);
      __ addsd(double_scratch, double_base);  // Convert -0 to +0.
      __ sqrtsd(double_result, double_scratch);
      __ jmp(&done);

      // Test for -0.5.
      __ bind(&not_plus_half);
      // Load double_exponent with -0.5 by substracting 1.
      __ subsd(double_scratch, double_result);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &fast_power, Label::kNear);

      // Calculates reciprocal of square root of base. Check for the special
      // case of Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
      // According to IEEE-754, single-precision -Infinity has the highest
      // 9 bits set and the lowest 23 bits cleared.
      __ mov(scratch, 0xFF800000u);
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      __ ucomisd(double_base, double_scratch);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal. However, it also sets the carry flag.
      __ j(not_equal, &continue_rsqrt, Label::kNear);
      __ j(carry, &continue_rsqrt, Label::kNear);

      // Set result to 0 in the special case.
      __ xorps(double_result, double_result);
      __ jmp(&done);

      __ bind(&continue_rsqrt);
      // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
      __ xorps(double_exponent, double_exponent);
      __ addsd(double_exponent, double_base);  // Convert -0 to +0.
      __ sqrtsd(double_exponent, double_exponent);
      __ divsd(double_result, double_exponent);
      __ jmp(&done);
    }

    // Using FPU instructions to calculate power.
    Label fast_power_failed;
    __ bind(&fast_power);
    __ fnclex();  // Clear flags to catch exceptions later.
    // Transfer (B)ase and (E)xponent onto the FPU register stack.
    __ sub(esp, Immediate(kDoubleSize));
    __ movsd(Operand(esp, 0), double_exponent);
    __ fld_d(Operand(esp, 0));  // E
    __ movsd(Operand(esp, 0), double_base);
    __ fld_d(Operand(esp, 0));  // B, E

    // Exponent is in st(1) and base is in st(0)
    // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
    // FYL2X calculates st(1) * log2(st(0))
    __ fyl2x();    // X
    __ fld(0);     // X, X
    __ frndint();  // rnd(X), X
    __ fsub(1);    // rnd(X), X-rnd(X)
    __ fxch(1);    // X - rnd(X), rnd(X)
    // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
    __ f2xm1();   // 2^(X-rnd(X)) - 1, rnd(X)
    __ fld1();    // 1, 2^(X-rnd(X)) - 1, rnd(X)
    __ faddp(1);  // 2^(X-rnd(X)), rnd(X)
    // FSCALE calculates st(0) * 2^st(1)
    __ fscale();  // 2^X, rnd(X)
    __ fstp(1);   // 2^X
    // Bail out to runtime in case of exceptions in the status word.
    __ fnstsw_ax();
    __ test_b(eax, 0x5F);  // We check for all but precision exception.
    __ j(not_zero, &fast_power_failed, Label::kNear);
    __ fstp_d(Operand(esp, 0));
    __ movsd(double_result, Operand(esp, 0));
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&done);

    __ bind(&fast_power_failed);
    __ fninit();
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&call_runtime);
  }

  // Calculate power with integer exponent (square-and-multiply).
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  __ mov(scratch, exponent);  // Back up exponent.
  __ movsd(double_scratch, double_base);  // Back up base.
  __ movsd(double_scratch2, double_result);  // Load double_exponent with 1.

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ test(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ neg(scratch);
  __ bind(&no_neg);

  __ j(zero, &while_false, Label::kNear);
  __ shr(scratch, 1);
  // Above condition means CF==0 && ZF==0. This means that the
  // bit that has been shifted out is 0 and the result is not 0.
  __ j(above, &while_true, Label::kNear);
  __ movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ shr(scratch, 1);
  __ mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);

  __ bind(&while_false);
  // scratch has the original value of the exponent - if the exponent is
  // negative, return 1/result.
  __ test(exponent, exponent);
  __ j(positive, &done);
  __ divsd(double_scratch2, double_result);
  __ movsd(double_result, double_scratch2);
  // Test whether result is zero. Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ xorps(double_scratch2, double_scratch2);
  __ ucomisd(double_scratch2, double_result);  // Result cannot be NaN.
  // double_exponent aliased as double_scratch2 has already been overwritten
  // and may not have contained the exponent value in the first place when the
  // exponent is a smi. We reset it with exponent value before bailing out.
  __ j(not_equal, &done);
  __ Cvtsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  Counters* counters = isolate()->counters();
  if (exponent_type() == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMathPowRT);

    // The stub is called from non-optimized code, which expects the result
    // as heap number in exponent.
    __ bind(&done);
    __ AllocateHeapNumber(eax, scratch, base, &call_runtime);
    __ movsd(FieldOperand(eax, HeapNumber::kValueOffset), double_result);
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(2 * kPointerSize);
  } else {
    __ bind(&call_runtime);
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(4, scratch);
      __ movsd(Operand(esp, 0 * kDoubleSize), double_base);
      __ movsd(Operand(esp, 1 * kDoubleSize), double_exponent);
      __ CallCFunction(
          ExternalReference::power_double_double_function(isolate()), 4);
    }
    // Return value is in st(0) on ia32.
    // Store it into the (fixed) result register.
    __ sub(esp, Immediate(kDoubleSize));
    __ fstp_d(Operand(esp, 0));
    __ movsd(double_result, Operand(esp, 0));
    __ add(esp, Immediate(kDoubleSize));

    __ bind(&done);
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(0);
  }
}
651
652
// Emits the Function.prototype load stub: returns the receiver function's
// prototype in eax, or tail-calls the LOAD_IC miss builtin when the prototype
// slot holds the hole (or TryGetFunctionPrototype bails out).
void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  Label miss;
  Register receiver = LoadDescriptor::ReceiverRegister();
  // With careful management, we won't have to save slot and vector on
  // the stack. Simply handle the possibly missing case first.
  // TODO(mvstanton): this code can be more efficient.
  __ cmp(FieldOperand(receiver, JSFunction::kPrototypeOrInitialMapOffset),
         Immediate(isolate()->factory()->the_hole_value()));
  __ j(equal, &miss);
  __ TryGetFunctionPrototype(receiver, eax, ebx, &miss);
  __ ret(0);

  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
}
669
670
671void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
672 // Return address is on the stack.
673 Label slow;
674
675 Register receiver = LoadDescriptor::ReceiverRegister();
676 Register key = LoadDescriptor::NameRegister();
677 Register scratch = eax;
678 DCHECK(!scratch.is(receiver) && !scratch.is(key));
679
680 // Check that the key is an array index, that is Uint32.
681 __ test(key, Immediate(kSmiTagMask | kSmiSignMask));
682 __ j(not_zero, &slow);
683
684 // Everything is fine, call runtime.
685 __ pop(scratch);
686 __ push(receiver); // receiver
687 __ push(key); // key
688 __ push(scratch); // return address
689
690 // Perform tail call to the entry.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000691 __ TailCallRuntime(Runtime::kLoadElementWithInterceptor);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000692
693 __ bind(&slow);
694 PropertyAccessCompiler::TailCallBuiltin(
695 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
696}
697
698
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400699void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
700 // Return address is on the stack.
701 Label miss;
702
703 Register receiver = LoadDescriptor::ReceiverRegister();
704 Register index = LoadDescriptor::NameRegister();
705 Register scratch = edi;
706 DCHECK(!scratch.is(receiver) && !scratch.is(index));
707 Register result = eax;
708 DCHECK(!result.is(scratch));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000709 DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()) &&
710 result.is(LoadDescriptor::SlotRegister()));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400711
712 // StringCharAtGenerator doesn't use the result register until it's passed
713 // the different miss possibilities. If it did, we would have a conflict
714 // when FLAG_vector_ics is true.
715 StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
716 &miss, // When not a string.
717 &miss, // When not a number.
718 &miss, // When index out of range.
719 STRING_INDEX_IS_ARRAY_INDEX,
720 RECEIVER_IS_STRING);
721 char_at_generator.GenerateFast(masm);
722 __ ret(0);
723
724 StubRuntimeCallHelper call_helper;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000725 char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400726
727 __ bind(&miss);
728 PropertyAccessCompiler::TailCallBuiltin(
729 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
730}
731
732
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100733void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
734 // The key is in edx and the parameter count is in eax.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000735 DCHECK(edx.is(ArgumentsAccessReadDescriptor::index()));
736 DCHECK(eax.is(ArgumentsAccessReadDescriptor::parameter_count()));
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100737
738 // The displacement is used for skipping the frame pointer on the
739 // stack. It is the offset of the last parameter (if any) relative
740 // to the frame pointer.
741 static const int kDisplacement = 1 * kPointerSize;
742
743 // Check that the key is a smi.
744 Label slow;
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000745 __ JumpIfNotSmi(edx, &slow, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100746
747 // Check if the calling frame is an arguments adaptor frame.
Ben Murdoch257744e2011-11-30 15:57:28 +0000748 Label adaptor;
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100749 __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
750 __ mov(ecx, Operand(ebx, StandardFrameConstants::kContextOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100751 __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
Ben Murdoch257744e2011-11-30 15:57:28 +0000752 __ j(equal, &adaptor, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100753
754 // Check index against formal parameters count limit passed in
755 // through register eax. Use unsigned comparison to get negative
756 // check for free.
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100757 __ cmp(edx, eax);
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000758 __ j(above_equal, &slow, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100759
760 // Read the argument from the stack and return it.
761 STATIC_ASSERT(kSmiTagSize == 1);
762 STATIC_ASSERT(kSmiTag == 0); // Shifting code depends on these.
763 __ lea(ebx, Operand(ebp, eax, times_2, 0));
764 __ neg(edx);
765 __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
766 __ ret(0);
767
768 // Arguments adaptor case: Check index against actual arguments
769 // limit found in the arguments adaptor frame. Use unsigned
770 // comparison to get negative check for free.
771 __ bind(&adaptor);
772 __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100773 __ cmp(edx, ecx);
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000774 __ j(above_equal, &slow, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100775
776 // Read the argument from the stack and return it.
777 STATIC_ASSERT(kSmiTagSize == 1);
778 STATIC_ASSERT(kSmiTag == 0); // Shifting code depends on these.
779 __ lea(ebx, Operand(ebx, ecx, times_2, 0));
780 __ neg(edx);
781 __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
782 __ ret(0);
783
784 // Slow-case: Handle non-smi or out-of-bounds access to arguments
785 // by calling the runtime system.
786 __ bind(&slow);
787 __ pop(ebx); // Return address.
788 __ push(edx);
789 __ push(ebx);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000790 __ TailCallRuntime(Runtime::kArguments);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100791}
792
793
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000794void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000795 // ecx : number of parameters (tagged)
796 // edx : parameters pointer
797 // edi : function
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100798 // esp[0] : return address
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000799
800 DCHECK(edi.is(ArgumentsAccessNewDescriptor::function()));
801 DCHECK(ecx.is(ArgumentsAccessNewDescriptor::parameter_count()));
802 DCHECK(edx.is(ArgumentsAccessNewDescriptor::parameter_pointer()));
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100803
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000804 // Check if the calling frame is an arguments adaptor frame.
805 Label runtime;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000806 __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
807 __ mov(eax, Operand(ebx, StandardFrameConstants::kContextOffset));
808 __ cmp(eax, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000809 __ j(not_equal, &runtime, Label::kNear);
810
811 // Patch the arguments.length and the parameters pointer.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000812 __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
813 __ lea(edx,
814 Operand(ebx, ecx, times_2, StandardFrameConstants::kCallerSPOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000815
816 __ bind(&runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000817 __ pop(eax); // Pop return address.
818 __ push(edi); // Push function.
819 __ push(edx); // Push parameters pointer.
820 __ push(ecx); // Push parameter count.
821 __ push(eax); // Push return address.
822 __ TailCallRuntime(Runtime::kNewSloppyArguments);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000823}
824
825
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000826void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000827 // ecx : number of parameters (tagged)
828 // edx : parameters pointer
829 // edi : function
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000830 // esp[0] : return address
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000831
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000832 DCHECK(edi.is(ArgumentsAccessNewDescriptor::function()));
833 DCHECK(ecx.is(ArgumentsAccessNewDescriptor::parameter_count()));
834 DCHECK(edx.is(ArgumentsAccessNewDescriptor::parameter_pointer()));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000835
836 // Check if the calling frame is an arguments adaptor frame.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000837 Label adaptor_frame, try_allocate, runtime;
838 __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
839 __ mov(eax, Operand(ebx, StandardFrameConstants::kContextOffset));
840 __ cmp(eax, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000841 __ j(equal, &adaptor_frame, Label::kNear);
842
843 // No adaptor, parameter count = argument count.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000844 __ mov(ebx, ecx);
845 __ push(ecx);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000846 __ jmp(&try_allocate, Label::kNear);
847
848 // We have an adaptor frame. Patch the parameters pointer.
849 __ bind(&adaptor_frame);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000850 __ mov(ebx, ecx);
851 __ push(ecx);
852 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000853 __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
854 __ lea(edx, Operand(edx, ecx, times_2,
855 StandardFrameConstants::kCallerSPOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000856
857 // ebx = parameter count (tagged)
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000858 // ecx = argument count (smi-tagged)
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000859 // Compute the mapped parameter count = min(ebx, ecx) in ebx.
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100860 __ cmp(ebx, ecx);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000861 __ j(less_equal, &try_allocate, Label::kNear);
862 __ mov(ebx, ecx);
863
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000864 // Save mapped parameter count and function.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000865 __ bind(&try_allocate);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000866 __ push(edi);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000867 __ push(ebx);
868
869 // Compute the sizes of backing store, parameter map, and arguments object.
870 // 1. Parameter map, has 2 extra words containing context and backing store.
871 const int kParameterMapHeaderSize =
872 FixedArray::kHeaderSize + 2 * kPointerSize;
873 Label no_parameter_map;
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100874 __ test(ebx, ebx);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000875 __ j(zero, &no_parameter_map, Label::kNear);
876 __ lea(ebx, Operand(ebx, times_2, kParameterMapHeaderSize));
877 __ bind(&no_parameter_map);
878
879 // 2. Backing store.
880 __ lea(ebx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize));
881
882 // 3. Arguments object.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000883 __ add(ebx, Immediate(Heap::kSloppyArgumentsObjectSize));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000884
885 // Do the allocation of all three objects in one go.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000886 __ Allocate(ebx, eax, edi, no_reg, &runtime, TAG_OBJECT);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000887
888 // eax = address of new object(s) (tagged)
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000889 // ecx = argument count (smi-tagged)
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000890 // esp[0] = mapped parameter count (tagged)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000891 // esp[4] = function
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000892 // esp[8] = parameter count (tagged)
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000893 // Get the arguments map from the current native context into edi.
894 Label has_mapped_parameters, instantiate;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000895 __ mov(edi, NativeContextOperand());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000896 __ mov(ebx, Operand(esp, 0 * kPointerSize));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100897 __ test(ebx, ebx);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000898 __ j(not_zero, &has_mapped_parameters, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000899 __ mov(
900 edi,
901 Operand(edi, Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX)));
902 __ jmp(&instantiate, Label::kNear);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000903
904 __ bind(&has_mapped_parameters);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000905 __ mov(edi, Operand(edi, Context::SlotOffset(
906 Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX)));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000907 __ bind(&instantiate);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000908
909 // eax = address of new object (tagged)
910 // ebx = mapped parameter count (tagged)
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000911 // ecx = argument count (smi-tagged)
912 // edi = address of arguments map (tagged)
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000913 // esp[0] = mapped parameter count (tagged)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000914 // esp[4] = function
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000915 // esp[8] = parameter count (tagged)
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000916 // Copy the JS object part.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000917 __ mov(FieldOperand(eax, JSObject::kMapOffset), edi);
918 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
919 masm->isolate()->factory()->empty_fixed_array());
920 __ mov(FieldOperand(eax, JSObject::kElementsOffset),
921 masm->isolate()->factory()->empty_fixed_array());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000922
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100923 // Set up the callee in-object property.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000924 STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000925 __ mov(edi, Operand(esp, 1 * kPointerSize));
926 __ AssertNotSmi(edi);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000927 __ mov(FieldOperand(eax, JSObject::kHeaderSize +
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000928 Heap::kArgumentsCalleeIndex * kPointerSize),
929 edi);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000930
931 // Use the length (smi tagged) and set that as an in-object property too.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000932 __ AssertSmi(ecx);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000933 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
934 __ mov(FieldOperand(eax, JSObject::kHeaderSize +
935 Heap::kArgumentsLengthIndex * kPointerSize),
936 ecx);
937
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100938 // Set up the elements pointer in the allocated arguments object.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000939 // If we allocated a parameter map, edi will point there, otherwise to the
940 // backing store.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000941 __ lea(edi, Operand(eax, Heap::kSloppyArgumentsObjectSize));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000942 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
943
944 // eax = address of new object (tagged)
945 // ebx = mapped parameter count (tagged)
946 // ecx = argument count (tagged)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000947 // edx = address of receiver argument
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000948 // edi = address of parameter map or backing store (tagged)
949 // esp[0] = mapped parameter count (tagged)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000950 // esp[4] = function
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000951 // esp[8] = parameter count (tagged)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000952 // Free two registers.
953 __ push(edx);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000954 __ push(eax);
955
956 // Initialize parameter map. If there are no mapped arguments, we're done.
957 Label skip_parameter_map;
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100958 __ test(ebx, ebx);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000959 __ j(zero, &skip_parameter_map);
960
961 __ mov(FieldOperand(edi, FixedArray::kMapOffset),
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000962 Immediate(isolate()->factory()->sloppy_arguments_elements_map()));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000963 __ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2))));
964 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax);
965 __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi);
966 __ lea(eax, Operand(edi, ebx, times_2, kParameterMapHeaderSize));
967 __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 1 * kPointerSize), eax);
968
969 // Copy the parameter slots and the holes in the arguments.
970 // We need to fill in mapped_parameter_count slots. They index the context,
971 // where parameters are stored in reverse order, at
972 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
973 // The mapped parameter thus need to get indices
974 // MIN_CONTEXT_SLOTS+parameter_count-1 ..
975 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
976 // We loop from right to left.
977 Label parameters_loop, parameters_test;
978 __ push(ecx);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000979 __ mov(eax, Operand(esp, 3 * kPointerSize));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000980 __ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000981 __ add(ebx, Operand(esp, 5 * kPointerSize));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100982 __ sub(ebx, eax);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000983 __ mov(ecx, isolate()->factory()->the_hole_value());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000984 __ mov(edx, edi);
985 __ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize));
986 // eax = loop variable (tagged)
987 // ebx = mapping index (tagged)
988 // ecx = the hole value
989 // edx = address of parameter map (tagged)
990 // edi = address of backing store (tagged)
991 // esp[0] = argument count (tagged)
992 // esp[4] = address of new object (tagged)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000993 // esp[8] = address of receiver argument
994 // esp[12] = mapped parameter count (tagged)
995 // esp[16] = function
996 // esp[20] = parameter count (tagged)
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000997 __ jmp(&parameters_test, Label::kNear);
998
999 __ bind(&parameters_loop);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001000 __ sub(eax, Immediate(Smi::FromInt(1)));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001001 __ mov(FieldOperand(edx, eax, times_2, kParameterMapHeaderSize), ebx);
1002 __ mov(FieldOperand(edi, eax, times_2, FixedArray::kHeaderSize), ecx);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001003 __ add(ebx, Immediate(Smi::FromInt(1)));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001004 __ bind(&parameters_test);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001005 __ test(eax, eax);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001006 __ j(not_zero, &parameters_loop, Label::kNear);
1007 __ pop(ecx);
1008
1009 __ bind(&skip_parameter_map);
1010
1011 // ecx = argument count (tagged)
1012 // edi = address of backing store (tagged)
1013 // esp[0] = address of new object (tagged)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001014 // esp[4] = address of receiver argument
1015 // esp[8] = mapped parameter count (tagged)
1016 // esp[12] = function
1017 // esp[16] = parameter count (tagged)
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001018 // Copy arguments header and remaining slots (if there are any).
1019 __ mov(FieldOperand(edi, FixedArray::kMapOffset),
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001020 Immediate(isolate()->factory()->fixed_array_map()));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001021 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
1022
1023 Label arguments_loop, arguments_test;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001024 __ mov(ebx, Operand(esp, 2 * kPointerSize));
1025 __ mov(edx, Operand(esp, 1 * kPointerSize));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001026 __ sub(edx, ebx); // Is there a smarter way to do negative scaling?
1027 __ sub(edx, ebx);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001028 __ jmp(&arguments_test, Label::kNear);
1029
1030 __ bind(&arguments_loop);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001031 __ sub(edx, Immediate(kPointerSize));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001032 __ mov(eax, Operand(edx, 0));
1033 __ mov(FieldOperand(edi, ebx, times_2, FixedArray::kHeaderSize), eax);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001034 __ add(ebx, Immediate(Smi::FromInt(1)));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001035
1036 __ bind(&arguments_test);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001037 __ cmp(ebx, ecx);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001038 __ j(less, &arguments_loop, Label::kNear);
1039
1040 // Restore.
1041 __ pop(eax); // Address of arguments object.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001042 __ Drop(4);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001043
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001044 // Return.
1045 __ ret(0);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001046
1047 // Do the runtime call to allocate the arguments object.
1048 __ bind(&runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001049 __ pop(eax); // Remove saved mapped parameter count.
1050 __ pop(edi); // Pop saved function.
1051 __ pop(eax); // Remove saved parameter count.
1052 __ pop(eax); // Pop return address.
1053 __ push(edi); // Push function.
1054 __ push(edx); // Push parameters pointer.
1055 __ push(ecx); // Push parameter count.
1056 __ push(eax); // Push return address.
1057 __ TailCallRuntime(Runtime::kNewSloppyArguments);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001058}
1059
1060
// Emits the fast path for materializing a strict-mode arguments object:
// a plain JSObject plus a FixedArray holding copies of the actual
// arguments (no parameter mapping, no callee property). Falls back to
// Runtime::kNewStrictArguments if new-space allocation fails.
void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
  // ecx : number of parameters (tagged)
  // edx : parameters pointer
  // edi : function
  // esp[0] : return address

  DCHECK(edi.is(ArgumentsAccessNewDescriptor::function()));
  DCHECK(ecx.is(ArgumentsAccessNewDescriptor::parameter_count()));
  DCHECK(edx.is(ArgumentsAccessNewDescriptor::parameter_pointer()));

  // Check if the calling frame is an arguments adaptor frame.
  Label try_allocate, runtime;
  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(eax, Operand(ebx, StandardFrameConstants::kContextOffset));
  __ cmp(eax, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &try_allocate, Label::kNear);

  // Patch the arguments.length and the parameters pointer.
  // ecx is a smi; times_2 scaling converts it to a byte offset.
  __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ lea(edx,
         Operand(ebx, ecx, times_2, StandardFrameConstants::kCallerSPOffset));

  // Try the new space allocation. Start out with computing the size of
  // the arguments object and the elements array.
  Label add_arguments_object;
  __ bind(&try_allocate);
  __ mov(eax, ecx);
  __ test(eax, eax);
  // Zero arguments: no elements array needed, only the object itself.
  __ j(zero, &add_arguments_object, Label::kNear);
  __ lea(eax, Operand(eax, times_2, FixedArray::kHeaderSize));
  __ bind(&add_arguments_object);
  __ add(eax, Immediate(Heap::kStrictArgumentsObjectSize));

  // Do the allocation of both objects in one go.
  __ Allocate(eax, eax, ebx, no_reg, &runtime, TAG_OBJECT);

  // Get the arguments map from the current native context.
  __ mov(edi, NativeContextOperand());
  __ mov(edi, ContextOperand(edi, Context::STRICT_ARGUMENTS_MAP_INDEX));

  __ mov(FieldOperand(eax, JSObject::kMapOffset), edi);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         masm->isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         masm->isolate()->factory()->empty_fixed_array());

  // Get the length (smi tagged) and set that as an in-object property too.
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ AssertSmi(ecx);
  __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                      Heap::kArgumentsLengthIndex * kPointerSize),
         ecx);

  // If there are no actual arguments, we're done.
  Label done;
  __ test(ecx, ecx);
  __ j(zero, &done, Label::kNear);

  // Set up the elements pointer in the allocated arguments object and
  // initialize the header in the elements fixed array.
  __ lea(edi, Operand(eax, Heap::kStrictArgumentsObjectSize));
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(isolate()->factory()->fixed_array_map()));
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);

  // Untag the length for the loop below.
  __ SmiUntag(ecx);

  // Copy the fixed array slots. edx walks down the caller's stack while
  // edi walks up the elements array.
  Label loop;
  __ bind(&loop);
  __ mov(ebx, Operand(edx, -1 * kPointerSize));  // Skip receiver.
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx);
  __ add(edi, Immediate(kPointerSize));
  __ sub(edx, Immediate(kPointerSize));
  __ dec(ecx);
  __ j(not_zero, &loop);

  // Return.
  __ bind(&done);
  __ ret(0);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ pop(eax);   // Pop return address.
  __ push(edi);  // Push function.
  __ push(edx);  // Push parameters pointer.
  __ push(ecx);  // Push parameter count.
  __ push(eax);  // Push return address.
  __ TailCallRuntime(Runtime::kNewStrictArguments);
}
1153
1154
// Emits code that materializes a rest parameter array. The stub only
// patches the argument count / parameters pointer when called through an
// arguments adaptor frame; the actual array construction is always
// delegated to Runtime::kNewRestParam.
void RestParamAccessStub::GenerateNew(MacroAssembler* masm) {
  // ecx : number of parameters (tagged)
  // edx : parameters pointer
  // ebx : rest parameter index (tagged)
  // esp[0] : return address

  // Check if the calling frame is an arguments adaptor frame.
  Label runtime;
  __ mov(edi, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(eax, Operand(edi, StandardFrameConstants::kContextOffset));
  __ cmp(eax, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &runtime);

  // Patch the arguments.length and the parameters pointer.
  // ecx is a smi; times_2 scaling converts it to a byte offset.
  __ mov(ecx, Operand(edi, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ lea(edx,
         Operand(edi, ecx, times_2, StandardFrameConstants::kCallerSPOffset));

  __ bind(&runtime);
  __ pop(eax);   // Save return address.
  __ push(ecx);  // Push number of parameters.
  __ push(edx);  // Push parameters pointer.
  __ push(ebx);  // Push rest parameter index.
  __ push(eax);  // Push return address.
  __ TailCallRuntime(Runtime::kNewRestParam);
}
1181
1182
1183void RegExpExecStub::Generate(MacroAssembler* masm) {
1184 // Just jump directly to runtime if native RegExp is not selected at compile
1185 // time or if regexp entry in generated code is turned off runtime switch or
1186 // at compilation.
1187#ifdef V8_INTERPRETED_REGEXP
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001188 __ TailCallRuntime(Runtime::kRegExpExec);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001189#else // V8_INTERPRETED_REGEXP
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001190
1191 // Stack frame on entry.
1192 // esp[0]: return address
1193 // esp[4]: last_match_info (expected JSArray)
1194 // esp[8]: previous index
1195 // esp[12]: subject string
1196 // esp[16]: JSRegExp object
1197
1198 static const int kLastMatchInfoOffset = 1 * kPointerSize;
1199 static const int kPreviousIndexOffset = 2 * kPointerSize;
1200 static const int kSubjectOffset = 3 * kPointerSize;
1201 static const int kJSRegExpOffset = 4 * kPointerSize;
1202
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001203 Label runtime;
1204 Factory* factory = isolate()->factory();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001205
1206 // Ensure that a RegExp stack is allocated.
1207 ExternalReference address_of_regexp_stack_memory_address =
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001208 ExternalReference::address_of_regexp_stack_memory_address(isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001209 ExternalReference address_of_regexp_stack_memory_size =
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001210 ExternalReference::address_of_regexp_stack_memory_size(isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001211 __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001212 __ test(ebx, ebx);
Ben Murdoch257744e2011-11-30 15:57:28 +00001213 __ j(zero, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001214
1215 // Check that the first argument is a JSRegExp object.
1216 __ mov(eax, Operand(esp, kJSRegExpOffset));
1217 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001218 __ JumpIfSmi(eax, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001219 __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
1220 __ j(not_equal, &runtime);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001221
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001222 // Check that the RegExp has been compiled (data contains a fixed array).
1223 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
1224 if (FLAG_debug_code) {
1225 __ test(ecx, Immediate(kSmiTagMask));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001226 __ Check(not_zero, kUnexpectedTypeForRegExpDataFixedArrayExpected);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001227 __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001228 __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001229 }
1230
1231 // ecx: RegExp data (FixedArray)
1232 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
1233 __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001234 __ cmp(ebx, Immediate(Smi::FromInt(JSRegExp::IRREGEXP)));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001235 __ j(not_equal, &runtime);
1236
1237 // ecx: RegExp data (FixedArray)
1238 // Check that the number of captures fit in the static offsets vector buffer.
1239 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001240 // Check (number_of_captures + 1) * 2 <= offsets vector size
1241 // Or number_of_captures * 2 <= offsets vector size - 2
1242 // Multiplying by 2 comes for free since edx is smi-tagged.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001243 STATIC_ASSERT(kSmiTag == 0);
1244 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001245 STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
1246 __ cmp(edx, Isolate::kJSRegexpStaticOffsetsVectorSize - 2);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001247 __ j(above, &runtime);
1248
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001249 // Reset offset for possibly sliced string.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001250 __ Move(edi, Immediate(0));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001251 __ mov(eax, Operand(esp, kSubjectOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001252 __ JumpIfSmi(eax, &runtime);
1253 __ mov(edx, eax); // Make a copy of the original subject string.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001254 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
1255 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001256
1257 // eax: subject string
1258 // edx: subject string
1259 // ebx: subject string instance type
1260 // ecx: RegExp data (FixedArray)
1261 // Handle subject string according to its encoding and representation:
1262 // (1) Sequential two byte? If yes, go to (9).
1263 // (2) Sequential one byte? If yes, go to (6).
1264 // (3) Anything but sequential or cons? If yes, go to (7).
1265 // (4) Cons string. If the string is flat, replace subject with first string.
1266 // Otherwise bailout.
1267 // (5a) Is subject sequential two byte? If yes, go to (9).
1268 // (5b) Is subject external? If yes, go to (8).
1269 // (6) One byte sequential. Load regexp code for one byte.
1270 // (E) Carry on.
1271 /// [...]
1272
1273 // Deferred code at the end of the stub:
1274 // (7) Not a long external string? If yes, go to (10).
1275 // (8) External string. Make it, offset-wise, look like a sequential string.
1276 // (8a) Is the external string one byte? If yes, go to (6).
1277 // (9) Two byte sequential. Load regexp code for one byte. Go to (E).
1278 // (10) Short external string or not a string? If yes, bail out to runtime.
1279 // (11) Sliced string. Replace subject with parent. Go to (5a).
1280
1281 Label seq_one_byte_string /* 6 */, seq_two_byte_string /* 9 */,
1282 external_string /* 8 */, check_underlying /* 5a */,
1283 not_seq_nor_cons /* 7 */, check_code /* E */,
1284 not_long_external /* 10 */;
1285
1286 // (1) Sequential two byte? If yes, go to (9).
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001287 __ and_(ebx, kIsNotStringMask |
1288 kStringRepresentationMask |
1289 kStringEncodingMask |
1290 kShortExternalStringMask);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001291 STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001292 __ j(zero, &seq_two_byte_string); // Go to (9).
1293
1294 // (2) Sequential one byte? If yes, go to (6).
1295 // Any other sequential string must be one byte.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001296 __ and_(ebx, Immediate(kIsNotStringMask |
1297 kStringRepresentationMask |
1298 kShortExternalStringMask));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001299 __ j(zero, &seq_one_byte_string, Label::kNear); // Go to (6).
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001300
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001301 // (3) Anything but sequential or cons? If yes, go to (7).
1302 // We check whether the subject string is a cons, since sequential strings
1303 // have already been covered.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001304 STATIC_ASSERT(kConsStringTag < kExternalStringTag);
1305 STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001306 STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
1307 STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
1308 __ cmp(ebx, Immediate(kExternalStringTag));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001309 __ j(greater_equal, &not_seq_nor_cons); // Go to (7).
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001310
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001311 // (4) Cons string. Check that it's flat.
1312 // Replace subject with first string and reload instance type.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001313 __ cmp(FieldOperand(eax, ConsString::kSecondOffset), factory->empty_string());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001314 __ j(not_equal, &runtime);
1315 __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001316 __ bind(&check_underlying);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001317 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001318 __ mov(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
1319
1320 // (5a) Is subject sequential two byte? If yes, go to (9).
1321 __ test_b(ebx, kStringRepresentationMask | kStringEncodingMask);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001322 STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001323 __ j(zero, &seq_two_byte_string); // Go to (9).
1324 // (5b) Is subject external? If yes, go to (8).
1325 __ test_b(ebx, kStringRepresentationMask);
1326 // The underlying external string is never a short external string.
1327 STATIC_ASSERT(ExternalString::kMaxShortLength < ConsString::kMinLength);
1328 STATIC_ASSERT(ExternalString::kMaxShortLength < SlicedString::kMinLength);
1329 __ j(not_zero, &external_string); // Go to (8).
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001330
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001331 // eax: sequential subject string (or look-alike, external string)
1332 // edx: original subject string
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001333 // ecx: RegExp data (FixedArray)
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001334 // (6) One byte sequential. Load regexp code for one byte.
1335 __ bind(&seq_one_byte_string);
1336 // Load previous index and check range before edx is overwritten. We have
1337 // to use edx instead of eax here because it might have been only made to
1338 // look like a sequential string when it actually is an external string.
1339 __ mov(ebx, Operand(esp, kPreviousIndexOffset));
1340 __ JumpIfNotSmi(ebx, &runtime);
1341 __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
1342 __ j(above_equal, &runtime);
1343 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataOneByteCodeOffset));
1344 __ Move(ecx, Immediate(1)); // Type is one byte.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001345
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001346 // (E) Carry on. String handling is done.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001347 __ bind(&check_code);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001348 // edx: irregexp code
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001349 // Check that the irregexp code has been generated for the actual string
1350 // encoding. If it has, the field contains a code object otherwise it contains
Ben Murdoch257744e2011-11-30 15:57:28 +00001351 // a smi (code flushing support).
1352 __ JumpIfSmi(edx, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001353
1354 // eax: subject string
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001355 // ebx: previous index (smi)
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001356 // edx: code
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001357 // ecx: encoding of subject string (1 if one_byte, 0 if two_byte);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001358 // All checks done. Now push arguments for native regexp code.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001359 Counters* counters = isolate()->counters();
Steve Block44f0eee2011-05-26 01:26:41 +01001360 __ IncrementCounter(counters->regexp_entry_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001361
Steve Block44f0eee2011-05-26 01:26:41 +01001362 // Isolates: note we add an additional parameter here (isolate pointer).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001363 static const int kRegExpExecuteArguments = 9;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001364 __ EnterApiExitFrame(kRegExpExecuteArguments);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001365
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001366 // Argument 9: Pass current isolate address.
1367 __ mov(Operand(esp, 8 * kPointerSize),
1368 Immediate(ExternalReference::isolate_address(isolate())));
Steve Block44f0eee2011-05-26 01:26:41 +01001369
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001370 // Argument 8: Indicate that this is a direct call from JavaScript.
1371 __ mov(Operand(esp, 7 * kPointerSize), Immediate(1));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001372
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001373 // Argument 7: Start (high end) of backtracking stack memory area.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001374 __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address));
1375 __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001376 __ mov(Operand(esp, 6 * kPointerSize), esi);
1377
1378 // Argument 6: Set the number of capture registers to zero to force global
1379 // regexps to behave as non-global. This does not affect non-global regexps.
1380 __ mov(Operand(esp, 5 * kPointerSize), Immediate(0));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001381
1382 // Argument 5: static offsets vector buffer.
1383 __ mov(Operand(esp, 4 * kPointerSize),
Steve Block44f0eee2011-05-26 01:26:41 +01001384 Immediate(ExternalReference::address_of_static_offsets_vector(
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001385 isolate())));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001386
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001387 // Argument 2: Previous index.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001388 __ SmiUntag(ebx);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001389 __ mov(Operand(esp, 1 * kPointerSize), ebx);
1390
1391 // Argument 1: Original subject string.
1392 // The original subject is in the previous stack frame. Therefore we have to
1393 // use ebp, which points exactly to one pointer size below the previous esp.
1394 // (Because creating a new stack frame pushes the previous ebp onto the stack
1395 // and thereby moves up esp by one kPointerSize.)
1396 __ mov(esi, Operand(ebp, kSubjectOffset + kPointerSize));
1397 __ mov(Operand(esp, 0 * kPointerSize), esi);
1398
1399 // esi: original subject string
1400 // eax: underlying subject string
1401 // ebx: previous index
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001402 // ecx: encoding of subject string (1 if one_byte 0 if two_byte);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001403 // edx: code
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001404 // Argument 4: End of string data
1405 // Argument 3: Start of string data
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001406 // Prepare start and end index of the input.
1407 // Load the length from the original sliced string if that is the case.
1408 __ mov(esi, FieldOperand(esi, String::kLengthOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001409 __ add(esi, edi); // Calculate input end wrt offset.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001410 __ SmiUntag(edi);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001411 __ add(ebx, edi); // Calculate input start wrt offset.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001412
1413 // ebx: start index of the input string
1414 // esi: end index of the input string
1415 Label setup_two_byte, setup_rest;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001416 __ test(ecx, ecx);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001417 __ j(zero, &setup_two_byte, Label::kNear);
1418 __ SmiUntag(esi);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001419 __ lea(ecx, FieldOperand(eax, esi, times_1, SeqOneByteString::kHeaderSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001420 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001421 __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqOneByteString::kHeaderSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001422 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3.
Ben Murdoch257744e2011-11-30 15:57:28 +00001423 __ jmp(&setup_rest, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001424
1425 __ bind(&setup_two_byte);
1426 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001427 STATIC_ASSERT(kSmiTagSize == 1); // esi is smi (powered by 2).
1428 __ lea(ecx, FieldOperand(eax, esi, times_1, SeqTwoByteString::kHeaderSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001429 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4.
1430 __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
1431 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3.
1432
1433 __ bind(&setup_rest);
1434
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001435 // Locate the code entry and call it.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001436 __ add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
1437 __ call(edx);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001438
1439 // Drop arguments and come back to JS mode.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001440 __ LeaveApiExitFrame(true);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001441
1442 // Check the result.
1443 Label success;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001444 __ cmp(eax, 1);
1445 // We expect exactly one result since we force the called regexp to behave
1446 // as non-global.
Ben Murdoch257744e2011-11-30 15:57:28 +00001447 __ j(equal, &success);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001448 Label failure;
1449 __ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
Ben Murdoch257744e2011-11-30 15:57:28 +00001450 __ j(equal, &failure);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001451 __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
1452 // If not exception it can only be retry. Handle that in the runtime system.
1453 __ j(not_equal, &runtime);
1454 // Result must now be exception. If there is no pending exception already a
1455 // stack overflow (on the backtrack stack) was detected in RegExp code but
1456 // haven't created the exception yet. Handle that in the runtime system.
1457 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
Ben Murdoch589d6972011-11-30 16:04:58 +00001458 ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001459 isolate());
1460 __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001461 __ mov(eax, Operand::StaticVariable(pending_exception));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001462 __ cmp(edx, eax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001463 __ j(equal, &runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001464
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001465 // For exception, throw the exception again.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001466 __ TailCallRuntime(Runtime::kRegExpExecReThrow);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001467
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001468 __ bind(&failure);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001469 // For failure to match, return null.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001470 __ mov(eax, factory->null_value());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001471 __ ret(4 * kPointerSize);
1472
1473 // Load RegExp data.
1474 __ bind(&success);
1475 __ mov(eax, Operand(esp, kJSRegExpOffset));
1476 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
1477 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
1478 // Calculate number of capture registers (number_of_captures + 1) * 2.
1479 STATIC_ASSERT(kSmiTag == 0);
1480 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001481 __ add(edx, Immediate(2)); // edx was a smi.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001482
1483 // edx: Number of capture registers
1484 // Load last_match_info which is still known to be a fast case JSArray.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001485 // Check that the fourth object is a JSArray object.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001486 __ mov(eax, Operand(esp, kLastMatchInfoOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001487 __ JumpIfSmi(eax, &runtime);
1488 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
1489 __ j(not_equal, &runtime);
1490 // Check that the JSArray is in fast case.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001491 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001492 __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
1493 __ cmp(eax, factory->fixed_array_map());
1494 __ j(not_equal, &runtime);
1495 // Check that the last match info has space for the capture registers and the
1496 // additional information.
1497 __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
1498 __ SmiUntag(eax);
1499 __ sub(eax, Immediate(RegExpImpl::kLastMatchOverhead));
1500 __ cmp(edx, eax);
1501 __ j(greater, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001502
1503 // ebx: last_match_info backing store (FixedArray)
1504 // edx: number of capture registers
1505 // Store the capture count.
1506 __ SmiTag(edx); // Number of capture registers to smi.
1507 __ mov(FieldOperand(ebx, RegExpImpl::kLastCaptureCountOffset), edx);
1508 __ SmiUntag(edx); // Number of capture registers back from smi.
1509 // Store last subject and last input.
1510 __ mov(eax, Operand(esp, kSubjectOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001511 __ mov(ecx, eax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001512 __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001513 __ RecordWriteField(ebx,
1514 RegExpImpl::kLastSubjectOffset,
1515 eax,
1516 edi,
1517 kDontSaveFPRegs);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001518 __ mov(eax, ecx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001519 __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001520 __ RecordWriteField(ebx,
1521 RegExpImpl::kLastInputOffset,
1522 eax,
1523 edi,
1524 kDontSaveFPRegs);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001525
1526 // Get the static offsets vector filled by the native regexp code.
1527 ExternalReference address_of_static_offsets_vector =
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001528 ExternalReference::address_of_static_offsets_vector(isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001529 __ mov(ecx, Immediate(address_of_static_offsets_vector));
1530
1531 // ebx: last_match_info backing store (FixedArray)
1532 // ecx: offsets vector
1533 // edx: number of capture registers
Ben Murdoch257744e2011-11-30 15:57:28 +00001534 Label next_capture, done;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001535 // Capture register counter starts from number of capture registers and
1536 // counts down until wraping after zero.
1537 __ bind(&next_capture);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001538 __ sub(edx, Immediate(1));
Ben Murdoch257744e2011-11-30 15:57:28 +00001539 __ j(negative, &done, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001540 // Read the value from the static offsets vector buffer.
1541 __ mov(edi, Operand(ecx, edx, times_int_size, 0));
1542 __ SmiTag(edi);
1543 // Store the smi value in the last match info.
1544 __ mov(FieldOperand(ebx,
1545 edx,
1546 times_pointer_size,
1547 RegExpImpl::kFirstCaptureOffset),
1548 edi);
1549 __ jmp(&next_capture);
1550 __ bind(&done);
1551
1552 // Return last match info.
1553 __ mov(eax, Operand(esp, kLastMatchInfoOffset));
1554 __ ret(4 * kPointerSize);
1555
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001556 // Do the runtime call to execute the regexp.
1557 __ bind(&runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001558 __ TailCallRuntime(Runtime::kRegExpExec);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001559
1560 // Deferred code for string handling.
1561 // (7) Not a long external string? If yes, go to (10).
1562 __ bind(&not_seq_nor_cons);
1563 // Compare flags are still set from (3).
1564 __ j(greater, &not_long_external, Label::kNear); // Go to (10).
1565
1566 // (8) External string. Short external strings have been ruled out.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001567 __ bind(&external_string);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001568 // Reload instance type.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001569 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
1570 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
1571 if (FLAG_debug_code) {
1572 // Assert that we do not have a cons or slice (indirect strings) here.
1573 // Sequential strings have already been ruled out.
1574 __ test_b(ebx, kIsIndirectStringMask);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001575 __ Assert(zero, kExternalStringExpectedButNotFound);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001576 }
1577 __ mov(eax, FieldOperand(eax, ExternalString::kResourceDataOffset));
1578 // Move the pointer so that offset-wise, it looks like a sequential string.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001579 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001580 __ sub(eax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
1581 STATIC_ASSERT(kTwoByteStringTag == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001582 // (8a) Is the external string one byte? If yes, go to (6).
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001583 __ test_b(ebx, kStringEncodingMask);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001584 __ j(not_zero, &seq_one_byte_string); // Goto (6).
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001585
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001586 // eax: sequential subject string (or look-alike, external string)
1587 // edx: original subject string
1588 // ecx: RegExp data (FixedArray)
1589 // (9) Two byte sequential. Load regexp code for one byte. Go to (E).
1590 __ bind(&seq_two_byte_string);
1591 // Load previous index and check range before edx is overwritten. We have
1592 // to use edx instead of eax here because it might have been only made to
1593 // look like a sequential string when it actually is an external string.
1594 __ mov(ebx, Operand(esp, kPreviousIndexOffset));
1595 __ JumpIfNotSmi(ebx, &runtime);
1596 __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
1597 __ j(above_equal, &runtime);
1598 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
1599 __ Move(ecx, Immediate(0)); // Type is two byte.
1600 __ jmp(&check_code); // Go to (E).
1601
1602 // (10) Not a string or a short external string? If yes, bail out to runtime.
1603 __ bind(&not_long_external);
1604 // Catch non-string subject or short external string.
1605 STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag !=0);
1606 __ test(ebx, Immediate(kIsNotStringMask | kShortExternalStringTag));
1607 __ j(not_zero, &runtime);
1608
1609 // (11) Sliced string. Replace subject with parent. Go to (5a).
1610 // Load offset into edi and replace subject string with parent.
1611 __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset));
1612 __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset));
1613 __ jmp(&check_underlying); // Go to (5a).
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001614#endif // V8_INTERPRETED_REGEXP
1615}
1616
1617
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001618static int NegativeComparisonResult(Condition cc) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001619 DCHECK(cc != equal);
1620 DCHECK((cc == less) || (cc == less_equal)
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001621 || (cc == greater) || (cc == greater_equal));
1622 return (cc == greater || cc == greater_equal) ? LESS : GREATER;
1623}
1624
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001625
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001626static void CheckInputType(MacroAssembler* masm, Register input,
1627 CompareICState::State expected, Label* fail) {
1628 Label ok;
1629 if (expected == CompareICState::SMI) {
1630 __ JumpIfNotSmi(input, fail);
1631 } else if (expected == CompareICState::NUMBER) {
1632 __ JumpIfSmi(input, &ok);
1633 __ cmp(FieldOperand(input, HeapObject::kMapOffset),
1634 Immediate(masm->isolate()->factory()->heap_number_map()));
1635 __ j(not_equal, fail);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001636 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001637 // We could be strict about internalized/non-internalized here, but as long as
1638 // hydrogen doesn't care, the stub doesn't have to care either.
1639 __ bind(&ok);
1640}
1641
1642
1643static void BranchIfNotInternalizedString(MacroAssembler* masm,
1644 Label* label,
1645 Register object,
1646 Register scratch) {
1647 __ JumpIfSmi(object, label);
1648 __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
1649 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
1650 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
1651 __ test(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
1652 __ j(not_zero, label);
1653}
1654
1655
1656void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001657 Label runtime_call, check_unequal_objects;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001658 Condition cc = GetCondition();
1659
1660 Label miss;
1661 CheckInputType(masm, edx, left(), &miss);
1662 CheckInputType(masm, eax, right(), &miss);
1663
1664 // Compare two smis.
1665 Label non_smi, smi_done;
1666 __ mov(ecx, edx);
1667 __ or_(ecx, eax);
1668 __ JumpIfNotSmi(ecx, &non_smi, Label::kNear);
1669 __ sub(edx, eax); // Return on the result of the subtraction.
1670 __ j(no_overflow, &smi_done, Label::kNear);
1671 __ not_(edx); // Correct sign in case of overflow. edx is never 0 here.
1672 __ bind(&smi_done);
1673 __ mov(eax, edx);
1674 __ ret(0);
1675 __ bind(&non_smi);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001676
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001677 // NOTICE! This code is only reached after a smi-fast-case check, so
1678 // it is certain that at least one operand isn't a smi.
1679
1680 // Identical objects can be compared fast, but there are some tricky cases
1681 // for NaN and undefined.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001682 Label generic_heap_number_comparison;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001683 {
1684 Label not_identical;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001685 __ cmp(eax, edx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001686 __ j(not_equal, &not_identical);
1687
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001688 if (cc != equal) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001689 // Check for undefined. undefined OP undefined is false even though
1690 // undefined == undefined.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001691 __ cmp(edx, isolate()->factory()->undefined_value());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001692 if (is_strong(strength())) {
1693 // In strong mode, this comparison must throw, so call the runtime.
1694 __ j(equal, &runtime_call, Label::kFar);
1695 } else {
1696 Label check_for_nan;
1697 __ j(not_equal, &check_for_nan, Label::kNear);
1698 __ Move(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
1699 __ ret(0);
1700 __ bind(&check_for_nan);
1701 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001702 }
1703
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001704 // Test for NaN. Compare heap numbers in a general way,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001705 // to handle NaNs correctly.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001706 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
1707 Immediate(isolate()->factory()->heap_number_map()));
1708 __ j(equal, &generic_heap_number_comparison, Label::kNear);
1709 if (cc != equal) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001710 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
1711 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001712 // Call runtime on identical JSObjects. Otherwise return equal.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001713 __ cmpb(ecx, static_cast<uint8_t>(FIRST_JS_RECEIVER_TYPE));
1714 __ j(above_equal, &runtime_call, Label::kFar);
1715 // Call runtime on identical symbols since we need to throw a TypeError.
1716 __ cmpb(ecx, static_cast<uint8_t>(SYMBOL_TYPE));
1717 __ j(equal, &runtime_call, Label::kFar);
1718 // Call runtime on identical SIMD values since we must throw a TypeError.
1719 __ cmpb(ecx, static_cast<uint8_t>(SIMD128_VALUE_TYPE));
1720 __ j(equal, &runtime_call, Label::kFar);
1721 if (is_strong(strength())) {
1722 // We have already tested for smis and heap numbers, so if both
1723 // arguments are not strings we must proceed to the slow case.
1724 __ test(ecx, Immediate(kIsNotStringMask));
1725 __ j(not_zero, &runtime_call, Label::kFar);
1726 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001727 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001728 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
1729 __ ret(0);
1730
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001731
1732 __ bind(&not_identical);
1733 }
1734
1735 // Strict equality can quickly decide whether objects are equal.
1736 // Non-strict object equality is slower, so it is handled later in the stub.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001737 if (cc == equal && strict()) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001738 Label slow; // Fallthrough label.
Ben Murdoch257744e2011-11-30 15:57:28 +00001739 Label not_smis;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001740 // If we're doing a strict equality comparison, we don't have to do
1741 // type conversion, so we generate code to do fast comparison for objects
1742 // and oddballs. Non-smi numbers and strings still go through the usual
1743 // slow-case code.
1744 // If either is a Smi (we know that not both are), then they can only
1745 // be equal if the other is a HeapNumber. If so, use the slow case.
1746 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001747 DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001748 __ mov(ecx, Immediate(kSmiTagMask));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001749 __ and_(ecx, eax);
1750 __ test(ecx, edx);
Ben Murdoch257744e2011-11-30 15:57:28 +00001751 __ j(not_zero, &not_smis, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001752 // One operand is a smi.
1753
1754 // Check whether the non-smi is a heap number.
1755 STATIC_ASSERT(kSmiTagMask == 1);
1756 // ecx still holds eax & kSmiTag, which is either zero or one.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001757 __ sub(ecx, Immediate(0x01));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001758 __ mov(ebx, edx);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001759 __ xor_(ebx, eax);
1760 __ and_(ebx, ecx); // ebx holds either 0 or eax ^ edx.
1761 __ xor_(ebx, eax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001762 // if eax was smi, ebx is now edx, else eax.
1763
1764 // Check if the non-smi operand is a heap number.
1765 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001766 Immediate(isolate()->factory()->heap_number_map()));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001767 // If heap number, handle it in the slow case.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001768 __ j(equal, &slow, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001769 // Return non-equal (ebx is not zero)
1770 __ mov(eax, ebx);
1771 __ ret(0);
1772
1773 __ bind(&not_smis);
1774 // If either operand is a JSObject or an oddball value, then they are not
1775 // equal since their pointers are different
1776 // There is no test for undetectability in strict equality.
1777
1778 // Get the type of the first operand.
1779 // If the first object is a JS object, we have done pointer comparison.
Ben Murdoch257744e2011-11-30 15:57:28 +00001780 Label first_non_object;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001781 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
1782 __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
Ben Murdoch257744e2011-11-30 15:57:28 +00001783 __ j(below, &first_non_object, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001784
1785 // Return non-zero (eax is not zero)
Ben Murdoch257744e2011-11-30 15:57:28 +00001786 Label return_not_equal;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001787 STATIC_ASSERT(kHeapObjectTag != 0);
1788 __ bind(&return_not_equal);
1789 __ ret(0);
1790
1791 __ bind(&first_non_object);
1792 // Check for oddballs: true, false, null, undefined.
1793 __ CmpInstanceType(ecx, ODDBALL_TYPE);
1794 __ j(equal, &return_not_equal);
1795
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001796 __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ecx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001797 __ j(above_equal, &return_not_equal);
1798
1799 // Check for oddballs: true, false, null, undefined.
1800 __ CmpInstanceType(ecx, ODDBALL_TYPE);
1801 __ j(equal, &return_not_equal);
1802
1803 // Fall through to the general case.
1804 __ bind(&slow);
1805 }
1806
1807 // Generate the number comparison code.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001808 Label non_number_comparison;
1809 Label unordered;
1810 __ bind(&generic_heap_number_comparison);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001811
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001812 FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison);
1813 __ ucomisd(xmm0, xmm1);
1814 // Don't base result on EFLAGS when a NaN is involved.
1815 __ j(parity_even, &unordered, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001816
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001817 __ mov(eax, 0); // equal
1818 __ mov(ecx, Immediate(Smi::FromInt(1)));
1819 __ cmov(above, eax, ecx);
1820 __ mov(ecx, Immediate(Smi::FromInt(-1)));
1821 __ cmov(below, eax, ecx);
1822 __ ret(0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001823
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001824 // If one of the numbers was NaN, then the result is always false.
1825 // The cc is never not-equal.
1826 __ bind(&unordered);
1827 DCHECK(cc != not_equal);
1828 if (cc == less || cc == less_equal) {
1829 __ mov(eax, Immediate(Smi::FromInt(1)));
1830 } else {
1831 __ mov(eax, Immediate(Smi::FromInt(-1)));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001832 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001833 __ ret(0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001834
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001835 // The number comparison code did not provide a valid result.
1836 __ bind(&non_number_comparison);
1837
1838 // Fast negative check for internalized-to-internalized equality.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001839 Label check_for_strings;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001840 if (cc == equal) {
1841 BranchIfNotInternalizedString(masm, &check_for_strings, eax, ecx);
1842 BranchIfNotInternalizedString(masm, &check_for_strings, edx, ecx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001843
1844 // We've already checked for object identity, so if both operands
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001845 // are internalized they aren't equal. Register eax already holds a
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001846 // non-zero value, which indicates not equal, so just return.
1847 __ ret(0);
1848 }
1849
1850 __ bind(&check_for_strings);
1851
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001852 __ JumpIfNotBothSequentialOneByteStrings(edx, eax, ecx, ebx,
1853 &check_unequal_objects);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001854
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001855 // Inline comparison of one-byte strings.
1856 if (cc == equal) {
1857 StringHelper::GenerateFlatOneByteStringEquals(masm, edx, eax, ecx, ebx);
Ben Murdoch257744e2011-11-30 15:57:28 +00001858 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001859 StringHelper::GenerateCompareFlatOneByteStrings(masm, edx, eax, ecx, ebx,
1860 edi);
Ben Murdoch257744e2011-11-30 15:57:28 +00001861 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001862#ifdef DEBUG
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001863 __ Abort(kUnexpectedFallThroughFromStringComparison);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001864#endif
1865
1866 __ bind(&check_unequal_objects);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001867 if (cc == equal && !strict()) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001868 // Non-strict equality. Objects are unequal if
1869 // they are both JSObjects and not undetectable,
1870 // and their pointers are different.
Ben Murdoch257744e2011-11-30 15:57:28 +00001871 Label return_unequal;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001872 // At most one is a smi, so we can test for smi by adding the two.
1873 // A smi plus a heap object has the low bit set, a heap object plus
1874 // a heap object has the low bit clear.
1875 STATIC_ASSERT(kSmiTag == 0);
1876 STATIC_ASSERT(kSmiTagMask == 1);
1877 __ lea(ecx, Operand(eax, edx, times_1, 0));
1878 __ test(ecx, Immediate(kSmiTagMask));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001879 __ j(not_zero, &runtime_call, Label::kNear);
1880 __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
1881 __ j(below, &runtime_call, Label::kNear);
1882 __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ebx);
1883 __ j(below, &runtime_call, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001884 // We do not bail out after this point. Both are JSObjects, and
1885 // they are equal if and only if both are undetectable.
1886 // The and of the undetectable flags is 1 if and only if they are equal.
1887 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
1888 1 << Map::kIsUndetectable);
Ben Murdoch257744e2011-11-30 15:57:28 +00001889 __ j(zero, &return_unequal, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001890 __ test_b(FieldOperand(ebx, Map::kBitFieldOffset),
1891 1 << Map::kIsUndetectable);
Ben Murdoch257744e2011-11-30 15:57:28 +00001892 __ j(zero, &return_unequal, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001893 // The objects are both undetectable, so they both compare as the value
1894 // undefined, and are equal.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001895 __ Move(eax, Immediate(EQUAL));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001896 __ bind(&return_unequal);
1897 // Return non-equal by returning the non-zero object pointer in eax,
1898 // or return equal if we fell through to here.
1899 __ ret(0); // rax, rdx were pushed
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001900 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001901 __ bind(&runtime_call);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001902
1903 // Push arguments below the return address.
1904 __ pop(ecx);
1905 __ push(edx);
1906 __ push(eax);
1907
1908 // Figure out which native to call and setup the arguments.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001909 if (cc == equal) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001910 __ push(ecx);
1911 __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001912 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001913 __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001914
1915 // Restore return address on the stack.
1916 __ push(ecx);
1917
1918 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
1919 // tagged as a small integer.
1920 __ TailCallRuntime(is_strong(strength()) ? Runtime::kCompare_Strong
1921 : Runtime::kCompare);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001922 }
1923
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001924 __ bind(&miss);
1925 GenerateMiss(masm);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001926}
1927
1928
// Helper for GenerateRecordCallTarget: emits a call to |stub| inside an
// internal frame while preserving the record-call-target register contract
// (eax/ebx/edx/edi, listed below).  eax is smi-tagged before being pushed so
// that every stack slot holds a valid tagged value across the call, and is
// untagged again on the way out.  Registers are popped in reverse push order.
static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
  // eax : number of arguments to the construct function
  // ebx : feedback vector
  // edx : slot in feedback vector (Smi)
  // edi : the function to call

  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Number-of-arguments register must be smi-tagged to call out.
    __ SmiTag(eax);
    __ push(eax);
    __ push(edi);
    __ push(edx);
    __ push(ebx);

    __ CallStub(stub);

    // Restore the contract registers (reverse order of the pushes above).
    __ pop(ebx);
    __ pop(edx);
    __ pop(edi);
    __ pop(eax);
    __ SmiUntag(eax);
  }
}
1954
1955
// Records the called function in the feedback vector slot, implementing the
// IC state machine visible below:
//   uninitialized sentinel -> monomorphic (WeakCell of the JSFunction, or an
//   AllocationSite when the callee is the Array() function) -> megamorphic
//   sentinel.
// ecx is used as a scratch register throughout; eax/ebx/edx/edi are preserved
// (stub calls go through CallStubInRecordCallTarget, which saves them).
static void GenerateRecordCallTarget(MacroAssembler* masm) {
  // Cache the called function in a feedback vector slot. Cache states
  // are uninitialized, monomorphic (indicated by a JSFunction), and
  // megamorphic.
  // eax : number of arguments to the construct function
  // ebx : feedback vector
  // edx : slot in feedback vector (Smi)
  // edi : the function to call
  Isolate* isolate = masm->isolate();
  Label initialize, done, miss, megamorphic, not_array_function;

  // Load the cache state into ecx.
  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
                           FixedArray::kHeaderSize));

  // A monomorphic cache hit or an already megamorphic state: invoke the
  // function without changing the state.
  // We don't know if ecx is a WeakCell or a Symbol, but it's harmless to read
  // at this position in a symbol (see static asserts in
  // type-feedback-vector.h).
  Label check_allocation_site;
  __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset));
  __ j(equal, &done, Label::kFar);
  __ CompareRoot(ecx, Heap::kmegamorphic_symbolRootIndex);
  __ j(equal, &done, Label::kFar);
  __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
                 Heap::kWeakCellMapRootIndex);
  __ j(not_equal, &check_allocation_site);

  // If the weak cell is cleared (its value is Smi 0), we have a new chance to
  // become monomorphic; otherwise it holds a different function, so go
  // megamorphic.
  __ JumpIfSmi(FieldOperand(ecx, WeakCell::kValueOffset), &initialize);
  __ jmp(&megamorphic);

  __ bind(&check_allocation_site);
  // If we came here, we need to see if we are the array function.
  // If we didn't have a matching function, and we didn't find the megamorph
  // sentinel, then we have in the slot either some other function or an
  // AllocationSite.
  __ CompareRoot(FieldOperand(ecx, 0), Heap::kAllocationSiteMapRootIndex);
  __ j(not_equal, &miss);

  // Make sure the function is the Array() function; an AllocationSite in the
  // slot only counts as a monomorphic hit for the Array constructor.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
  __ cmp(edi, ecx);
  __ j(not_equal, &megamorphic);
  __ jmp(&done, Label::kFar);

  __ bind(&miss);

  // A monomorphic miss (i.e, here the cache is not uninitialized) goes
  // megamorphic.
  __ CompareRoot(ecx, Heap::kuninitialized_symbolRootIndex);
  __ j(equal, &initialize);
  // MegamorphicSentinel is an immortal immovable object (undefined) so no
  // write-barrier is needed.
  __ bind(&megamorphic);
  __ mov(
      FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize),
      Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
  __ jmp(&done, Label::kFar);

  // An uninitialized cache is patched with the function or sentinel to
  // indicate the ElementsKind if function is the Array constructor.
  __ bind(&initialize);
  // Make sure the function is the Array() function
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
  __ cmp(edi, ecx);
  __ j(not_equal, &not_array_function);

  // The target function is the Array constructor,
  // Create an AllocationSite if we don't already have it, store it in the
  // slot.
  CreateAllocationSiteStub create_stub(isolate);
  CallStubInRecordCallTarget(masm, &create_stub);
  __ jmp(&done);

  // Any other function: cache it in a freshly created WeakCell.
  __ bind(&not_array_function);
  CreateWeakCellStub weak_cell_stub(isolate);
  CallStubInRecordCallTarget(masm, &weak_cell_stub);
  __ bind(&done);
}
2037
2038
// Emits the CallConstruct stub: records the construct target in the feedback
// vector, then tail-calls the callee's function-specific construct stub.
// Anything that is not a JSFunction falls through to the generic Construct
// builtin instead.
void CallConstructStub::Generate(MacroAssembler* masm) {
  // eax : number of arguments
  // ebx : feedback vector
  // edx : slot in feedback vector (Smi, for RecordCallTarget)
  // edi : constructor function

  Label non_function;
  // Check that function is not a smi.
  __ JumpIfSmi(edi, &non_function);
  // Check that function is a JSFunction.
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
  __ j(not_equal, &non_function);

  GenerateRecordCallTarget(masm);

  Label feedback_register_initialized;
  // Put the AllocationSite from the feedback vector into ebx, or undefined.
  // (After GenerateRecordCallTarget the slot holds either an AllocationSite
  // or a WeakCell; only an AllocationSite is forwarded to the construct stub.)
  __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
                           FixedArray::kHeaderSize));
  Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
  __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
  __ j(equal, &feedback_register_initialized);
  __ mov(ebx, isolate()->factory()->undefined_value());
  __ bind(&feedback_register_initialized);

  __ AssertUndefinedOrAllocationSite(ebx);

  // Pass new target to construct stub.
  __ mov(edx, edi);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset));
  __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
  __ jmp(ecx);

  // Smi or non-JSFunction callee: let the generic Construct builtin deal with
  // it (it also raises the appropriate error for non-constructables).
  __ bind(&non_function);
  __ mov(edx, edi);
  __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
2080
2081
// Handles the CallIC fast path for a slot holding an AllocationSite: if the
// callee really is the Array() function, bumps the call count and tail-calls
// the ArrayConstructorStub with the AllocationSite in ebx; otherwise jumps
// to |miss|.
void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
  // edi - function
  // edx - slot id
  // ebx - vector
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
  __ cmp(edi, ecx);
  __ j(not_equal, miss);

  __ mov(eax, arg_count());
  // Reload ecx with the AllocationSite from the feedback slot (ecx was
  // clobbered by LoadGlobalFunction above).
  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
                           FixedArray::kHeaderSize));

  // Increment the call count for monomorphic function calls.  The count lives
  // one pointer past the feedback slot (kHeaderSize + kPointerSize).
  __ add(FieldOperand(ebx, edx, times_half_pointer_size,
                      FixedArray::kHeaderSize + kPointerSize),
         Immediate(Smi::FromInt(CallICNexus::kCallCountIncrement)));

  // ArrayConstructorStub expects the AllocationSite in ebx and the
  // constructor/new-target in edi/edx.
  __ mov(ebx, ecx);
  __ mov(edx, edi);
  ArrayConstructorStub stub(masm->isolate(), arg_count());
  __ TailCallStub(&stub);

  // Unreachable.
}
2107
2108
// Emits the CallIC dispatch.  Fast path: the feedback slot holds a WeakCell
// whose value equals the callee -> bump the call count and jump to the
// CallFunction builtin.  Slow paths handle the megamorphic sentinel, an
// AllocationSite (Array() calls), transition-to-megamorphic, and
// initialization of an uninitialized slot; anything else goes to the miss
// handler in the runtime.
void CallICStub::Generate(MacroAssembler* masm) {
  // edi - function
  // edx - slot id
  // ebx - vector
  Isolate* isolate = masm->isolate();
  Label extra_checks_or_miss, call, call_function;
  int argc = arg_count();
  ParameterCount actual(argc);

  // The checks. First, does edi match the recorded monomorphic target?
  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
                           FixedArray::kHeaderSize));

  // We don't know that we have a weak cell. We might have a private symbol
  // or an AllocationSite, but the memory is safe to examine.
  // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
  // FixedArray.
  // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
  // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
  // computed, meaning that it can't appear to be a pointer. If the low bit is
  // 0, then hash is computed, but the 0 bit prevents the field from appearing
  // to be a pointer.
  STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
  STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
                    WeakCell::kValueOffset &&
                WeakCell::kValueOffset == Symbol::kHashFieldSlot);

  __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset));
  __ j(not_equal, &extra_checks_or_miss);

  // The compare above could have been a SMI/SMI comparison. Guard against this
  // convincing us that we have a monomorphic JSFunction.
  __ JumpIfSmi(edi, &extra_checks_or_miss);

  // Increment the call count for monomorphic function calls.
  __ add(FieldOperand(ebx, edx, times_half_pointer_size,
                      FixedArray::kHeaderSize + kPointerSize),
         Immediate(Smi::FromInt(CallICNexus::kCallCountIncrement)));

  // Monomorphic hit: dispatch through the CallFunction builtin.
  __ bind(&call_function);
  __ Set(eax, argc);
  __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode()),
          RelocInfo::CODE_TARGET);

  __ bind(&extra_checks_or_miss);
  Label uninitialized, miss, not_allocation_site;

  // Already megamorphic: skip feedback entirely and use the generic Call.
  __ cmp(ecx, Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
  __ j(equal, &call);

  // Check if we have an allocation site.
  __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
                 Heap::kAllocationSiteMapRootIndex);
  __ j(not_equal, &not_allocation_site);

  // We have an allocation site.
  HandleArrayCase(masm, &miss);

  __ bind(&not_allocation_site);

  // The following cases attempt to handle MISS cases without going to the
  // runtime.
  if (FLAG_trace_ic) {
    // With IC tracing enabled, always go through the runtime so the
    // transition is logged.
    __ jmp(&miss);
  }

  __ cmp(ecx, Immediate(TypeFeedbackVector::UninitializedSentinel(isolate)));
  __ j(equal, &uninitialized);

  // We are going megamorphic. If the feedback is a JSFunction, it is fine
  // to handle it here. More complex cases are dealt with in the runtime.
  __ AssertNotSmi(ecx);
  __ CmpObjectType(ecx, JS_FUNCTION_TYPE, ecx);
  __ j(not_equal, &miss);
  __ mov(
      FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize),
      Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));

  // Megamorphic (or post-miss) dispatch through the generic Call builtin.
  __ bind(&call);
  __ Set(eax, argc);
  __ Jump(masm->isolate()->builtins()->Call(convert_mode()),
          RelocInfo::CODE_TARGET);

  __ bind(&uninitialized);

  // We are going monomorphic, provided we actually have a JSFunction.
  __ JumpIfSmi(edi, &miss);

  // Goto miss case if we do not have a function.
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
  __ j(not_equal, &miss);

  // Make sure the function is not the Array() function, which requires special
  // behavior on MISS.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
  __ cmp(edi, ecx);
  __ j(equal, &miss);

  // Make sure the function belongs to the same native context.
  __ mov(ecx, FieldOperand(edi, JSFunction::kContextOffset));
  __ mov(ecx, ContextOperand(ecx, Context::NATIVE_CONTEXT_INDEX));
  __ cmp(ecx, NativeContextOperand());
  __ j(not_equal, &miss);

  // Initialize the call counter.
  __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
                      FixedArray::kHeaderSize + kPointerSize),
         Immediate(Smi::FromInt(CallICNexus::kCallCountIncrement)));

  // Store the function. Use a stub since we need a frame for allocation.
  // ebx - vector
  // edx - slot
  // edi - function
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    CreateWeakCellStub create_stub(isolate);
    __ push(edi);
    __ CallStub(&create_stub);
    __ pop(edi);
  }

  __ jmp(&call_function);

  // We are here because tracing is on or we encountered a MISS case we can't
  // handle here.
  __ bind(&miss);
  GenerateMiss(masm);

  // GenerateMiss leaves the (possibly updated) callee in edi; retry through
  // the generic call path.
  __ jmp(&call);

  // Unreachable
  __ int3();
}
2242
2243
// Emits the CallIC miss path: calls Runtime::kCallIC_Miss with the callee,
// vector, and slot, and leaves the runtime's result (the function to call)
// in edi.  The push order below is the argument order the runtime entry
// expects; do not reorder.
void CallICStub::GenerateMiss(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);

  // Push the function and feedback info.
  __ push(edi);
  __ push(ebx);
  __ push(edx);

  // Call the entry.
  __ CallRuntime(Runtime::kCallIC_Miss);

  // Move result to edi and exit the internal frame.
  __ mov(edi, eax);
}
2258
2259
// On ia32 the CEntry stub's code object never needs to be immovable.
// NOTE(review): other architectures appear to override this differently —
// confirm against their code-stubs-<arch>.cc before relying on it.
bool CEntryStub::NeedsImmovableCode() {
  return false;
}
2263
2264
// Pre-generates the stubs that must exist before other code generation can
// run.  The order of these calls is significant — per the comment below, the
// store buffer overflow stubs must be generated before the stubs that follow
// (presumably because those stubs' generated code depends on them — confirm
// against the stub implementations before reordering).
void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  CEntryStub::GenerateAheadOfTime(isolate);
  StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
  StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
  // It is important that the store buffer overflow stubs are generated first.
  ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
  CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
  CreateWeakCellStub::GenerateAheadOfTime(isolate);
  BinaryOpICStub::GenerateAheadOfTime(isolate);
  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
  StoreFastElementStub::GenerateAheadOfTime(isolate);
  TypeofStub::GenerateAheadOfTime(isolate);
}
2278
2279
// Ensures the FP-register-saving CEntry stub exists (GetCode generates it if
// it is not already in the stub cache) and records that fact on the isolate.
void CodeStub::GenerateFPStubs(Isolate* isolate) {
  // Generate if not already in cache.
  CEntryStub(isolate, 1, kSaveFPRegs).GetCode();
  isolate->set_fp_stubs_generated(true);
}
2285
2286
// Pre-generates the common (1 result register, no FP save) CEntry stub so it
// is available before any code that needs to call into the runtime.
void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
  CEntryStub stub(isolate, 1, kDontSaveFPRegs);
  stub.GetCode();
}
2291
2292
// Emits the CEntry stub: the JavaScript -> C++ transition.  Sets up an exit
// frame, calls the C function in ebx with (argc, argv, isolate), checks the
// result for the exception sentinel, and either returns to the caller or
// unwinds to the pending exception handler computed by the runtime.
void CEntryStub::Generate(MacroAssembler* masm) {
  // eax: number of arguments including receiver
  // ebx: pointer to C function (C callee-saved)
  // ebp: frame pointer (restored after C call)
  // esp: stack pointer (restored after C call)
  // esi: current context (C callee-saved)
  // edi: JS function of the caller (C callee-saved)
  //
  // If argv_in_register():
  // ecx: pointer to the first argument

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Enter the exit frame that transitions from JavaScript to C++.
  if (argv_in_register()) {
    DCHECK(!save_doubles());
    __ EnterApiExitFrame(3);

    // Move argc and argv into the correct registers.
    __ mov(esi, ecx);
    __ mov(edi, eax);
  } else {
    __ EnterExitFrame(save_doubles());
  }

  // ebx: pointer to C function (C callee-saved)
  // ebp: frame pointer (restored after C call)
  // esp: stack pointer (restored after C call)
  // edi: number of arguments including receiver (C callee-saved)
  // esi: pointer to the first argument (C callee-saved)

  // Result returned in eax, or eax+edx if result size is 2.

  // Check stack alignment.
  if (FLAG_debug_code) {
    __ CheckStackAlignment();
  }

  // Call C function: outgoing arguments are (argc, argv, isolate), written
  // directly into the outgoing-argument slots of the exit frame.
  __ mov(Operand(esp, 0 * kPointerSize), edi);  // argc.
  __ mov(Operand(esp, 1 * kPointerSize), esi);  // argv.
  __ mov(Operand(esp, 2 * kPointerSize),
         Immediate(ExternalReference::isolate_address(isolate())));
  __ call(ebx);
  // Result is in eax or edx:eax - do not destroy these registers!

  // Check result for exception sentinel.
  Label exception_returned;
  __ cmp(eax, isolate()->factory()->exception());
  __ j(equal, &exception_returned);

  // Check that there is no pending exception, otherwise we
  // should have returned the exception sentinel.
  if (FLAG_debug_code) {
    __ push(edx);
    __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
    Label okay;
    ExternalReference pending_exception_address(
        Isolate::kPendingExceptionAddress, isolate());
    __ cmp(edx, Operand::StaticVariable(pending_exception_address));
    // Cannot use check here as it attempts to generate call into runtime.
    __ j(equal, &okay, Label::kNear);
    __ int3();
    __ bind(&okay);
    __ pop(edx);
  }

  // Exit the JavaScript to C++ exit frame.  The argv stack slots are popped
  // here only when argv was passed on the stack (not in a register).
  __ LeaveExitFrame(save_doubles(), !argv_in_register());
  __ ret(0);

  // Handling of exception.
  __ bind(&exception_returned);

  ExternalReference pending_handler_context_address(
      Isolate::kPendingHandlerContextAddress, isolate());
  ExternalReference pending_handler_code_address(
      Isolate::kPendingHandlerCodeAddress, isolate());
  ExternalReference pending_handler_offset_address(
      Isolate::kPendingHandlerOffsetAddress, isolate());
  ExternalReference pending_handler_fp_address(
      Isolate::kPendingHandlerFPAddress, isolate());
  ExternalReference pending_handler_sp_address(
      Isolate::kPendingHandlerSPAddress, isolate());

  // Ask the runtime for help to determine the handler. This will set eax to
  // contain the current pending exception, don't clobber it.
  ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler,
                                 isolate());
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(3, eax);
    __ mov(Operand(esp, 0 * kPointerSize), Immediate(0));  // argc.
    __ mov(Operand(esp, 1 * kPointerSize), Immediate(0));  // argv.
    __ mov(Operand(esp, 2 * kPointerSize),
           Immediate(ExternalReference::isolate_address(isolate())));
    __ CallCFunction(find_handler, 3);
  }

  // Retrieve the handler context, SP and FP.
  __ mov(esi, Operand::StaticVariable(pending_handler_context_address));
  __ mov(esp, Operand::StaticVariable(pending_handler_sp_address));
  __ mov(ebp, Operand::StaticVariable(pending_handler_fp_address));

  // If the handler is a JS frame, restore the context to the frame. Note that
  // the context will be set to (esi == 0) for non-JS frames.
  Label skip;
  __ test(esi, esi);
  __ j(zero, &skip, Label::kNear);
  __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  __ bind(&skip);

  // Compute the handler entry address (code object + header + offset) and
  // jump to it.
  __ mov(edi, Operand::StaticVariable(pending_handler_code_address));
  __ mov(edx, Operand::StaticVariable(pending_handler_offset_address));
  __ lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
  __ jmp(edi);
}
2411
2412
// Generates the JS entry trampoline: the machine-code bridge used when C++
// calls into JavaScript. It builds an entry frame, installs a stack handler
// that catches any JS exception, and dispatches through the JSEntryTrampoline
// (or JSConstructEntryTrampoline) builtin. On the catch path, eax holds the
// thrown exception; on the normal path, eax holds the call result.
void JSEntryStub::Generate(MacroAssembler* masm) {
  Label invoke, handler_entry, exit;
  Label not_outermost_js, not_outermost_js_2;

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Set up frame.
  __ push(ebp);
  __ mov(ebp, esp);

  // Push marker in two places.
  int marker = type();
  __ push(Immediate(Smi::FromInt(marker)));  // context slot
  __ push(Immediate(Smi::FromInt(marker)));  // function slot
  // Save callee-saved registers (C calling conventions).
  __ push(edi);
  __ push(esi);
  __ push(ebx);

  // Save copies of the top frame descriptor on the stack.
  ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
  __ push(Operand::StaticVariable(c_entry_fp));

  // If this is the outermost JS call, set js_entry_sp value.
  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
  __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
  __ j(not_equal, &not_outermost_js, Label::kNear);
  __ mov(Operand::StaticVariable(js_entry_sp), ebp);
  __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
  __ jmp(&invoke, Label::kNear);
  __ bind(&not_outermost_js);
  __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));

  // Jump to a faked try block that does the invoke, with a faked catch
  // block that sets the pending exception.
  __ jmp(&invoke);
  __ bind(&handler_entry);
  // Record the handler's code offset so the unwinder can jump here.
  handler_offset_ = handler_entry.pos();
  // Caught exception: Store result (exception) in the pending exception
  // field in the JSEnv and return a failure sentinel.
  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
                                      isolate());
  __ mov(Operand::StaticVariable(pending_exception), eax);
  __ mov(eax, Immediate(isolate()->factory()->exception()));
  __ jmp(&exit);

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  __ PushStackHandler();

  // Clear any pending exceptions.
  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
  __ mov(Operand::StaticVariable(pending_exception), edx);

  // Fake a receiver (NULL).
  __ push(Immediate(0));  // receiver

  // Invoke the function by calling through JS entry trampoline builtin and
  // pop the faked function when we return. Notice that we cannot store a
  // reference to the trampoline code directly in this stub, because the
  // builtin stubs may not have been generated yet.
  if (type() == StackFrame::ENTRY_CONSTRUCT) {
    ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
                                      isolate());
    __ mov(edx, Immediate(construct_entry));
  } else {
    ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
    __ mov(edx, Immediate(entry));
  }
  __ mov(edx, Operand(edx, 0));  // deref address
  // Skip the Code object header to reach the first instruction.
  __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
  __ call(edx);

  // Unlink this frame from the handler chain.
  __ PopStackHandler();

  __ bind(&exit);
  // Check if the current stack frame is marked as the outermost JS frame.
  __ pop(ebx);
  __ cmp(ebx, Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
  __ j(not_equal, &not_outermost_js_2);
  // Leaving the outermost JS frame: reset js_entry_sp.
  __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
  __ bind(&not_outermost_js_2);

  // Restore the top frame descriptor from the stack.
  __ pop(Operand::StaticVariable(ExternalReference(
      Isolate::kCEntryFPAddress, isolate())));

  // Restore callee-saved registers (C calling conventions).
  __ pop(ebx);
  __ pop(esi);
  __ pop(edi);
  __ add(esp, Immediate(2 * kPointerSize));  // remove markers

  // Restore frame pointer and return.
  __ pop(ebp);
  __ ret(0);
}
2511
2512
// Generates the fast path for the `instanceof` operator: consults the global
// instanceof cache, then walks {object}'s prototype chain looking for
// {function}'s "prototype". Falls back to the runtime for smi/non-function
// right-hand sides (%InstanceOf) and for proxies or access-checked objects
// (%HasInPrototypeChain). Returns the boolean answer in eax.
void InstanceOfStub::Generate(MacroAssembler* masm) {
  Register const object = edx;                       // Object (lhs).
  Register const function = eax;                     // Function (rhs).
  Register const object_map = ecx;                   // Map of {object}.
  Register const function_map = ebx;                 // Map of {function}.
  // NOTE: {function_prototype} deliberately aliases {function_map}; the map
  // is dead by the time the prototype is loaded into it.
  Register const function_prototype = function_map;  // Prototype of {function}.
  Register const scratch = edi;

  DCHECK(object.is(InstanceOfDescriptor::LeftRegister()));
  DCHECK(function.is(InstanceOfDescriptor::RightRegister()));

  // Check if {object} is a smi.
  Label object_is_smi;
  __ JumpIfSmi(object, &object_is_smi, Label::kNear);

  // Lookup the {function} and the {object} map in the global instanceof cache.
  // Note: This is safe because we clear the global instanceof cache whenever
  // we change the prototype of any object.
  Label fast_case, slow_case;
  __ mov(object_map, FieldOperand(object, HeapObject::kMapOffset));
  __ CompareRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
  __ j(not_equal, &fast_case, Label::kNear);
  __ CompareRoot(object_map, scratch, Heap::kInstanceofCacheMapRootIndex);
  __ j(not_equal, &fast_case, Label::kNear);
  // Cache hit: return the previously computed answer.
  __ LoadRoot(eax, Heap::kInstanceofCacheAnswerRootIndex);
  __ ret(0);

  // If {object} is a smi we can safely return false if {function} is a JS
  // function, otherwise we have to miss to the runtime and throw an exception.
  __ bind(&object_is_smi);
  __ JumpIfSmi(function, &slow_case);
  __ CmpObjectType(function, JS_FUNCTION_TYPE, function_map);
  __ j(not_equal, &slow_case);
  __ LoadRoot(eax, Heap::kFalseValueRootIndex);
  __ ret(0);

  // Fast-case: The {function} must be a valid JSFunction.
  __ bind(&fast_case);
  __ JumpIfSmi(function, &slow_case);
  __ CmpObjectType(function, JS_FUNCTION_TYPE, function_map);
  __ j(not_equal, &slow_case);

  // Ensure that {function} has an instance prototype.
  __ test_b(FieldOperand(function_map, Map::kBitFieldOffset),
            static_cast<uint8_t>(1 << Map::kHasNonInstancePrototype));
  __ j(not_zero, &slow_case);

  // Get the "prototype" (or initial map) of the {function}.
  __ mov(function_prototype,
         FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  __ AssertNotSmi(function_prototype);

  // Resolve the prototype if the {function} has an initial map. Afterwards the
  // {function_prototype} will be either the JSReceiver prototype object or the
  // hole value, which means that no instances of the {function} were created so
  // far and hence we should return false.
  Label function_prototype_valid;
  Register const function_prototype_map = scratch;
  __ CmpObjectType(function_prototype, MAP_TYPE, function_prototype_map);
  __ j(not_equal, &function_prototype_valid, Label::kNear);
  __ mov(function_prototype,
         FieldOperand(function_prototype, Map::kPrototypeOffset));
  __ bind(&function_prototype_valid);
  __ AssertNotSmi(function_prototype);

  // Update the global instanceof cache with the current {object} map and
  // {function}. The cached answer will be set when it is known below.
  __ StoreRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
  __ StoreRoot(object_map, scratch, Heap::kInstanceofCacheMapRootIndex);

  // Loop through the prototype chain looking for the {function} prototype.
  // Assume true, and change to false if not found.
  Label done, loop, fast_runtime_fallback;
  __ mov(eax, isolate()->factory()->true_value());
  __ bind(&loop);

  // Check if the object needs to be access checked.
  __ test_b(FieldOperand(object_map, Map::kBitFieldOffset),
            1 << Map::kIsAccessCheckNeeded);
  __ j(not_zero, &fast_runtime_fallback, Label::kNear);
  // Check if the current object is a Proxy.
  __ CmpInstanceType(object_map, JS_PROXY_TYPE);
  __ j(equal, &fast_runtime_fallback, Label::kNear);

  // Advance to the next prototype; stop when it equals the target prototype
  // (answer stays true) or when the chain ends at null (answer becomes false).
  __ mov(object, FieldOperand(object_map, Map::kPrototypeOffset));
  __ cmp(object, function_prototype);
  __ j(equal, &done, Label::kNear);
  __ mov(object_map, FieldOperand(object, HeapObject::kMapOffset));
  __ cmp(object, isolate()->factory()->null_value());
  __ j(not_equal, &loop);
  __ mov(eax, isolate()->factory()->false_value());

  __ bind(&done);
  // Record the answer in the cache before returning it.
  __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
  __ ret(0);

  // Found Proxy or access check needed: Call the runtime.
  __ bind(&fast_runtime_fallback);
  __ PopReturnAddressTo(scratch);
  __ Push(object);
  __ Push(function_prototype);
  __ PushReturnAddressFrom(scratch);
  // Invalidate the instanceof cache.
  __ Move(eax, Immediate(Smi::FromInt(0)));
  __ StoreRoot(eax, scratch, Heap::kInstanceofCacheFunctionRootIndex);
  __ TailCallRuntime(Runtime::kHasInPrototypeChain);

  // Slow-case: Call the %InstanceOf runtime function.
  __ bind(&slow_case);
  __ PopReturnAddressTo(scratch);
  __ Push(object);
  __ Push(function);
  __ PushReturnAddressFrom(scratch);
  __ TailCallRuntime(Runtime::kInstanceOf);
}
2628
2629
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002630// -------------------------------------------------------------------------
2631// StringCharCodeAtGenerator
2632
// Emits the fast path for String.prototype.charCodeAt-style access: checks
// that the receiver is a string (unless check_mode_ says it is known to be)
// and that the index is an in-range smi, then loads the character code into
// result_ as a smi. All failure conditions branch to the caller-provided
// labels (receiver_not_string_, index_not_smi_, index_out_of_range_,
// call_runtime_), which GenerateSlow later binds or handles.
void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
  // If the receiver is a smi trigger the non-string case.
  STATIC_ASSERT(kSmiTag == 0);
  if (check_mode_ == RECEIVER_IS_UNKNOWN) {
    __ JumpIfSmi(object_, receiver_not_string_);

    // Fetch the instance type of the receiver into result register.
    __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
    __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
    // If the receiver is not a string trigger the non-string case.
    __ test(result_, Immediate(kIsNotStringMask));
    __ j(not_zero, receiver_not_string_);
  }

  // If the index is non-smi trigger the non-smi case.
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfNotSmi(index_, &index_not_smi_);
  // Re-entry point after GenerateSlow has converted the index to a smi.
  __ bind(&got_smi_index_);

  // Check for index out of range.
  __ cmp(index_, FieldOperand(object_, String::kLengthOffset));
  __ j(above_equal, index_out_of_range_);

  __ SmiUntag(index_);

  Factory* factory = masm->isolate()->factory();
  StringCharLoadGenerator::Generate(
      masm, factory, object_, index_, result_, &call_runtime_);

  // Return the character code as a smi.
  __ SmiTag(result_);
  __ bind(&exit_);
}
2665
2666
// Emits the slow paths for GenerateFast: converts a heap-number index to a
// smi via the runtime, and handles strings too complex for the inline load
// by calling %StringCharCodeAtRT. When embedded in an IC handler
// (PART_OF_IC_HANDLER), the load IC's vector and slot registers are saved
// around the runtime calls. Both entry and exit fall-throughs abort, as
// control must always arrive via the labels bound here.
void StringCharCodeAtGenerator::GenerateSlow(
    MacroAssembler* masm, EmbedMode embed_mode,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);

  // Index is not a smi.
  __ bind(&index_not_smi_);
  // If index is a heap number, try converting it to an integer.
  __ CheckMap(index_,
              masm->isolate()->factory()->heap_number_map(),
              index_not_number_,
              DONT_DO_SMI_CHECK);
  call_helper.BeforeCall(masm);
  if (embed_mode == PART_OF_IC_HANDLER) {
    // Preserve the IC's feedback vector and slot across the call.
    __ push(LoadWithVectorDescriptor::VectorRegister());
    __ push(LoadDescriptor::SlotRegister());
  }
  __ push(object_);
  __ push(index_);  // Consumed by runtime conversion function.
  if (index_flags_ == STRING_INDEX_IS_NUMBER) {
    __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero);
  } else {
    DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
    // NumberToSmi discards numbers that are not exact integers.
    __ CallRuntime(Runtime::kNumberToSmi);
  }
  if (!index_.is(eax)) {
    // Save the conversion result before the pop instructions below
    // have a chance to overwrite it.
    __ mov(index_, eax);
  }
  __ pop(object_);
  if (embed_mode == PART_OF_IC_HANDLER) {
    // Restore in reverse push order.
    __ pop(LoadDescriptor::SlotRegister());
    __ pop(LoadWithVectorDescriptor::VectorRegister());
  }
  // Reload the instance type.
  __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
  __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
  call_helper.AfterCall(masm);
  // If index is still not a smi, it must be out of range.
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfNotSmi(index_, index_out_of_range_);
  // Otherwise, return to the fast path.
  __ jmp(&got_smi_index_);

  // Call runtime. We get here when the receiver is a string and the
  // index is a number, but the code of getting the actual character
  // is too complex (e.g., when the string needs to be flattened).
  __ bind(&call_runtime_);
  call_helper.BeforeCall(masm);
  __ push(object_);
  // The runtime expects a smi-tagged index.
  __ SmiTag(index_);
  __ push(index_);
  __ CallRuntime(Runtime::kStringCharCodeAtRT);
  if (!result_.is(eax)) {
    __ mov(result_, eax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
}
2730
2731
2732// -------------------------------------------------------------------------
2733// StringCharFromCodeGenerator
2734
// Emits the fast path that maps a smi character code to a one-character
// string via the single-character string cache. Branches to slow_case_ when
// the code is not a smi in the one-byte range or when the cache entry is
// undefined (not yet populated); on success result_ holds the string and
// control reaches exit_.
void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
  // Fast case of Heap::LookupSingleCharacterStringFromCode.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiShiftSize == 0);
  DCHECK(base::bits::IsPowerOfTwo32(String::kMaxOneByteCharCodeU + 1));
  // One test covers both conditions: the smi tag bit and any bits above the
  // maximum one-byte char code (shifted to account for the smi tag).
  __ test(code_, Immediate(kSmiTagMask |
                           ((~String::kMaxOneByteCharCodeU) << kSmiTagSize)));
  __ j(not_zero, &slow_case_);

  Factory* factory = masm->isolate()->factory();
  __ Move(result_, Immediate(factory->single_character_string_cache()));
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiShiftSize == 0);
  // At this point code register contains smi tagged one byte char code.
  // A smi is 2x the value, so scale by half a pointer to index the cache.
  __ mov(result_, FieldOperand(result_,
                               code_, times_half_pointer_size,
                               FixedArray::kHeaderSize));
  __ cmp(result_, factory->undefined_value());
  __ j(equal, &slow_case_);
  __ bind(&exit_);
}
2757
2758
// Emits the slow path for GenerateFast: calls %StringCharFromCode with the
// (already smi-tagged) code and moves the resulting string into result_,
// then jumps back to the fast path's exit. Fall-throughs abort because
// control must enter via slow_case_ and leave via the jmp.
void StringCharFromCodeGenerator::GenerateSlow(
    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);

  __ bind(&slow_case_);
  call_helper.BeforeCall(masm);
  __ push(code_);
  __ CallRuntime(Runtime::kStringCharFromCode);
  if (!result_.is(eax)) {
    // The runtime returns in eax; relocate if result_ is a different register.
    __ mov(result_, eax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
}
2776
2777
// Emits a simple byte-wise copy loop for string characters.
//
// dest:     destination address register (advanced during the copy).
// src:      source address register (advanced during the copy).
// count:    number of characters (clobbered; doubled for two-byte strings
//           so it becomes a byte count).
// scratch:  temporary for the byte being moved; must be distinct from the
//           other three registers.
// encoding: ONE_BYTE_ENCODING or TWO_BYTE_ENCODING of the strings.
void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
                                          Register dest,
                                          Register src,
                                          Register count,
                                          Register scratch,
                                          String::Encoding encoding) {
  DCHECK(!scratch.is(dest));
  DCHECK(!scratch.is(src));
  DCHECK(!scratch.is(count));

  // Nothing to do for zero characters.
  Label done;
  __ test(count, count);
  __ j(zero, &done);

  // Make count the number of bytes to copy.
  if (encoding == String::TWO_BYTE_ENCODING) {
    // Two bytes per character.
    __ shl(count, 1);
  }

  // Copy one byte per iteration, advancing both pointers.
  Label loop;
  __ bind(&loop);
  __ mov_b(scratch, Operand(src, 0));
  __ mov_b(Operand(dest, 0), scratch);
  __ inc(src);
  __ inc(dest);
  __ dec(count);
  __ j(not_zero, &loop);

  __ bind(&done);
}
2809
2810
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002811void SubStringStub::Generate(MacroAssembler* masm) {
2812 Label runtime;
2813
2814 // Stack frame on entry.
2815 // esp[0]: return address
2816 // esp[4]: to
2817 // esp[8]: from
2818 // esp[12]: string
2819
2820 // Make sure first argument is a string.
2821 __ mov(eax, Operand(esp, 3 * kPointerSize));
2822 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002823 __ JumpIfSmi(eax, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002824 Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
2825 __ j(NegateCondition(is_string), &runtime);
2826
2827 // eax: string
2828 // ebx: instance type
2829
2830 // Calculate length of sub string using the smi values.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002831 __ mov(ecx, Operand(esp, 1 * kPointerSize)); // To index.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002832 __ JumpIfNotSmi(ecx, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002833 __ mov(edx, Operand(esp, 2 * kPointerSize)); // From index.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002834 __ JumpIfNotSmi(edx, &runtime);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002835 __ sub(ecx, edx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002836 __ cmp(ecx, FieldOperand(eax, String::kLengthOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002837 Label not_original_string;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002838 // Shorter than original string's length: an actual substring.
2839 __ j(below, &not_original_string, Label::kNear);
2840 // Longer than original string's length or negative: unsafe arguments.
2841 __ j(above, &runtime);
2842 // Return original string.
2843 Counters* counters = isolate()->counters();
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002844 __ IncrementCounter(counters->sub_string_native(), 1);
2845 __ ret(3 * kPointerSize);
2846 __ bind(&not_original_string);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002847
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002848 Label single_char;
2849 __ cmp(ecx, Immediate(Smi::FromInt(1)));
2850 __ j(equal, &single_char);
2851
Ben Murdochc7cc0282012-03-05 14:35:55 +00002852 // eax: string
2853 // ebx: instance type
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002854 // ecx: sub string length (smi)
Ben Murdochc7cc0282012-03-05 14:35:55 +00002855 // edx: from index (smi)
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002856 // Deal with different string types: update the index if necessary
2857 // and put the underlying string into edi.
2858 Label underlying_unpacked, sliced_string, seq_or_external_string;
2859 // If the string is not indirect, it can only be sequential or external.
2860 STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
2861 STATIC_ASSERT(kIsIndirectStringMask != 0);
2862 __ test(ebx, Immediate(kIsIndirectStringMask));
2863 __ j(zero, &seq_or_external_string, Label::kNear);
Ben Murdochc7cc0282012-03-05 14:35:55 +00002864
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002865 Factory* factory = isolate()->factory();
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002866 __ test(ebx, Immediate(kSlicedNotConsMask));
2867 __ j(not_zero, &sliced_string, Label::kNear);
2868 // Cons string. Check whether it is flat, then fetch first part.
2869 // Flat cons strings have an empty second part.
2870 __ cmp(FieldOperand(eax, ConsString::kSecondOffset),
2871 factory->empty_string());
2872 __ j(not_equal, &runtime);
2873 __ mov(edi, FieldOperand(eax, ConsString::kFirstOffset));
2874 // Update instance type.
2875 __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002876 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002877 __ jmp(&underlying_unpacked, Label::kNear);
2878
2879 __ bind(&sliced_string);
2880 // Sliced string. Fetch parent and adjust start index by offset.
2881 __ add(edx, FieldOperand(eax, SlicedString::kOffsetOffset));
2882 __ mov(edi, FieldOperand(eax, SlicedString::kParentOffset));
2883 // Update instance type.
2884 __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
2885 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
2886 __ jmp(&underlying_unpacked, Label::kNear);
2887
2888 __ bind(&seq_or_external_string);
2889 // Sequential or external string. Just move string to the expected register.
2890 __ mov(edi, eax);
2891
2892 __ bind(&underlying_unpacked);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002893
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002894 if (FLAG_string_slices) {
2895 Label copy_routine;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002896 // edi: underlying subject string
2897 // ebx: instance type of underlying subject string
2898 // edx: adjusted start index (smi)
2899 // ecx: length (smi)
2900 __ cmp(ecx, Immediate(Smi::FromInt(SlicedString::kMinLength)));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002901 // Short slice. Copy instead of slicing.
2902 __ j(less, &copy_routine);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002903 // Allocate new sliced string. At this point we do not reload the instance
2904 // type including the string encoding because we simply rely on the info
2905 // provided by the original string. It does not matter if the original
2906 // string's encoding is wrong because we always have to recheck encoding of
2907 // the newly created string's parent anyways due to externalized strings.
2908 Label two_byte_slice, set_slice_header;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002909 STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
Ben Murdoch589d6972011-11-30 16:04:58 +00002910 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
2911 __ test(ebx, Immediate(kStringEncodingMask));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002912 __ j(zero, &two_byte_slice, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002913 __ AllocateOneByteSlicedString(eax, ebx, no_reg, &runtime);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002914 __ jmp(&set_slice_header, Label::kNear);
2915 __ bind(&two_byte_slice);
Ben Murdoch589d6972011-11-30 16:04:58 +00002916 __ AllocateTwoByteSlicedString(eax, ebx, no_reg, &runtime);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002917 __ bind(&set_slice_header);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002918 __ mov(FieldOperand(eax, SlicedString::kLengthOffset), ecx);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002919 __ mov(FieldOperand(eax, SlicedString::kHashFieldOffset),
2920 Immediate(String::kEmptyHashField));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002921 __ mov(FieldOperand(eax, SlicedString::kParentOffset), edi);
2922 __ mov(FieldOperand(eax, SlicedString::kOffsetOffset), edx);
2923 __ IncrementCounter(counters->sub_string_native(), 1);
2924 __ ret(3 * kPointerSize);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002925
2926 __ bind(&copy_routine);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002927 }
2928
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002929 // edi: underlying subject string
2930 // ebx: instance type of underlying subject string
2931 // edx: adjusted start index (smi)
2932 // ecx: length (smi)
2933 // The subject string can only be external or sequential string of either
2934 // encoding at this point.
2935 Label two_byte_sequential, runtime_drop_two, sequential_string;
2936 STATIC_ASSERT(kExternalStringTag != 0);
2937 STATIC_ASSERT(kSeqStringTag == 0);
2938 __ test_b(ebx, kExternalStringTag);
2939 __ j(zero, &sequential_string);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002940
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002941 // Handle external string.
2942 // Rule out short external strings.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002943 STATIC_ASSERT(kShortExternalStringTag != 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002944 __ test_b(ebx, kShortExternalStringMask);
2945 __ j(not_zero, &runtime);
2946 __ mov(edi, FieldOperand(edi, ExternalString::kResourceDataOffset));
2947 // Move the pointer so that offset-wise, it looks like a sequential string.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002948 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002949 __ sub(edi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
2950
2951 __ bind(&sequential_string);
2952 // Stash away (adjusted) index and (underlying) string.
2953 __ push(edx);
2954 __ push(edi);
2955 __ SmiUntag(ecx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002956 STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002957 __ test_b(ebx, kStringEncodingMask);
2958 __ j(zero, &two_byte_sequential);
2959
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002960 // Sequential one byte string. Allocate the result.
2961 __ AllocateOneByteString(eax, ecx, ebx, edx, edi, &runtime_drop_two);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002962
2963 // eax: result string
2964 // ecx: result string length
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002965 // Locate first character of result.
2966 __ mov(edi, eax);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002967 __ add(edi, Immediate(SeqOneByteString::kHeaderSize - kHeapObjectTag));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002968 // Load string argument and locate character of sub string start.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002969 __ pop(edx);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002970 __ pop(ebx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002971 __ SmiUntag(ebx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002972 __ lea(edx, FieldOperand(edx, ebx, times_1, SeqOneByteString::kHeaderSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002973
2974 // eax: result string
2975 // ecx: result length
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002976 // edi: first character of result
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002977 // edx: character of sub string start
2978 StringHelper::GenerateCopyCharacters(
2979 masm, edi, edx, ecx, ebx, String::ONE_BYTE_ENCODING);
Steve Block44f0eee2011-05-26 01:26:41 +01002980 __ IncrementCounter(counters->sub_string_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002981 __ ret(3 * kPointerSize);
2982
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002983 __ bind(&two_byte_sequential);
2984 // Sequential two-byte string. Allocate the result.
2985 __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime_drop_two);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002986
2987 // eax: result string
2988 // ecx: result string length
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002989 // Locate first character of result.
2990 __ mov(edi, eax);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002991 __ add(edi,
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002992 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
2993 // Load string argument and locate character of sub string start.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002994 __ pop(edx);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002995 __ pop(ebx);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002996 // As from is a smi it is 2 times the value which matches the size of a two
2997 // byte character.
2998 STATIC_ASSERT(kSmiTag == 0);
2999 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003000 __ lea(edx, FieldOperand(edx, ebx, times_1, SeqTwoByteString::kHeaderSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003001
3002 // eax: result string
3003 // ecx: result length
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003004 // edi: first character of result
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003005 // edx: character of sub string start
3006 StringHelper::GenerateCopyCharacters(
3007 masm, edi, edx, ecx, ebx, String::TWO_BYTE_ENCODING);
Steve Block44f0eee2011-05-26 01:26:41 +01003008 __ IncrementCounter(counters->sub_string_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003009 __ ret(3 * kPointerSize);
3010
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003011 // Drop pushed values on the stack before tail call.
3012 __ bind(&runtime_drop_two);
3013 __ Drop(2);
3014
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003015 // Just jump to runtime to create the sub string.
3016 __ bind(&runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003017 __ TailCallRuntime(Runtime::kSubString);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003018
3019 __ bind(&single_char);
3020 // eax: string
3021 // ebx: instance type
3022 // ecx: sub string length (smi)
3023 // edx: from index (smi)
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003024 StringCharAtGenerator generator(eax, edx, ecx, eax, &runtime, &runtime,
3025 &runtime, STRING_INDEX_IS_NUMBER,
3026 RECEIVER_IS_STRING);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003027 generator.GenerateFast(masm);
3028 __ ret(3 * kPointerSize);
3029 generator.SkipSlow(masm, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003030}
3031
3032
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003033void ToNumberStub::Generate(MacroAssembler* masm) {
3034 // The ToNumber stub takes one argument in eax.
3035 Label not_smi;
3036 __ JumpIfNotSmi(eax, &not_smi, Label::kNear);
3037 __ Ret();
3038 __ bind(&not_smi);
3039
3040 Label not_heap_number;
3041 __ CompareMap(eax, masm->isolate()->factory()->heap_number_map());
3042 __ j(not_equal, &not_heap_number, Label::kNear);
3043 __ Ret();
3044 __ bind(&not_heap_number);
3045
3046 Label not_string, slow_string;
3047 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edi);
3048 // eax: object
3049 // edi: object map
3050 __ j(above_equal, &not_string, Label::kNear);
3051 // Check if string has a cached array index.
3052 __ test(FieldOperand(eax, String::kHashFieldOffset),
3053 Immediate(String::kContainsCachedArrayIndexMask));
3054 __ j(not_zero, &slow_string, Label::kNear);
3055 __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
3056 __ IndexFromHash(eax, eax);
3057 __ Ret();
3058 __ bind(&slow_string);
3059 __ pop(ecx); // Pop return address.
3060 __ push(eax); // Push argument.
3061 __ push(ecx); // Push return address.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003062 __ TailCallRuntime(Runtime::kStringToNumber);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003063 __ bind(&not_string);
3064
3065 Label not_oddball;
3066 __ CmpInstanceType(edi, ODDBALL_TYPE);
3067 __ j(not_equal, &not_oddball, Label::kNear);
3068 __ mov(eax, FieldOperand(eax, Oddball::kToNumberOffset));
3069 __ Ret();
3070 __ bind(&not_oddball);
3071
3072 __ pop(ecx); // Pop return address.
3073 __ push(eax); // Push argument.
3074 __ push(ecx); // Push return address.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003075 __ TailCallRuntime(Runtime::kToNumber);
3076}
3077
3078
3079void ToLengthStub::Generate(MacroAssembler* masm) {
3080 // The ToLength stub takes on argument in eax.
3081 Label not_smi, positive_smi;
3082 __ JumpIfNotSmi(eax, &not_smi, Label::kNear);
3083 STATIC_ASSERT(kSmiTag == 0);
3084 __ test(eax, eax);
3085 __ j(greater_equal, &positive_smi, Label::kNear);
3086 __ xor_(eax, eax);
3087 __ bind(&positive_smi);
3088 __ Ret();
3089 __ bind(&not_smi);
3090
3091 __ pop(ecx); // Pop return address.
3092 __ push(eax); // Push argument.
3093 __ push(ecx); // Push return address.
3094 __ TailCallRuntime(Runtime::kToLength);
3095}
3096
3097
3098void ToStringStub::Generate(MacroAssembler* masm) {
3099 // The ToString stub takes one argument in eax.
3100 Label is_number;
3101 __ JumpIfSmi(eax, &is_number, Label::kNear);
3102
3103 Label not_string;
3104 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edi);
3105 // eax: receiver
3106 // edi: receiver map
3107 __ j(above_equal, &not_string, Label::kNear);
3108 __ Ret();
3109 __ bind(&not_string);
3110
3111 Label not_heap_number;
3112 __ CompareMap(eax, masm->isolate()->factory()->heap_number_map());
3113 __ j(not_equal, &not_heap_number, Label::kNear);
3114 __ bind(&is_number);
3115 NumberToStringStub stub(isolate());
3116 __ TailCallStub(&stub);
3117 __ bind(&not_heap_number);
3118
3119 Label not_oddball;
3120 __ CmpInstanceType(edi, ODDBALL_TYPE);
3121 __ j(not_equal, &not_oddball, Label::kNear);
3122 __ mov(eax, FieldOperand(eax, Oddball::kToStringOffset));
3123 __ Ret();
3124 __ bind(&not_oddball);
3125
3126 __ pop(ecx); // Pop return address.
3127 __ push(eax); // Push argument.
3128 __ push(ecx); // Push return address.
3129 __ TailCallRuntime(Runtime::kToString);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003130}
3131
3132
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003133void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
3134 Register left,
3135 Register right,
3136 Register scratch1,
3137 Register scratch2) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003138 Register length = scratch1;
3139
3140 // Compare lengths.
3141 Label strings_not_equal, check_zero_length;
3142 __ mov(length, FieldOperand(left, String::kLengthOffset));
3143 __ cmp(length, FieldOperand(right, String::kLengthOffset));
3144 __ j(equal, &check_zero_length, Label::kNear);
3145 __ bind(&strings_not_equal);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003146 __ Move(eax, Immediate(Smi::FromInt(NOT_EQUAL)));
Ben Murdoch257744e2011-11-30 15:57:28 +00003147 __ ret(0);
3148
3149 // Check if the length is zero.
3150 Label compare_chars;
3151 __ bind(&check_zero_length);
3152 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003153 __ test(length, length);
Ben Murdoch257744e2011-11-30 15:57:28 +00003154 __ j(not_zero, &compare_chars, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003155 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
Ben Murdoch257744e2011-11-30 15:57:28 +00003156 __ ret(0);
3157
3158 // Compare characters.
3159 __ bind(&compare_chars);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003160 GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2,
3161 &strings_not_equal, Label::kNear);
Ben Murdoch257744e2011-11-30 15:57:28 +00003162
3163 // Characters are equal.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003164 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
Ben Murdoch257744e2011-11-30 15:57:28 +00003165 __ ret(0);
3166}
3167
3168
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003169void StringHelper::GenerateCompareFlatOneByteStrings(
3170 MacroAssembler* masm, Register left, Register right, Register scratch1,
3171 Register scratch2, Register scratch3) {
Steve Block44f0eee2011-05-26 01:26:41 +01003172 Counters* counters = masm->isolate()->counters();
3173 __ IncrementCounter(counters->string_compare_native(), 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003174
3175 // Find minimum length.
Ben Murdoch257744e2011-11-30 15:57:28 +00003176 Label left_shorter;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003177 __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
3178 __ mov(scratch3, scratch1);
3179 __ sub(scratch3, FieldOperand(right, String::kLengthOffset));
3180
3181 Register length_delta = scratch3;
3182
Ben Murdoch257744e2011-11-30 15:57:28 +00003183 __ j(less_equal, &left_shorter, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003184 // Right string is shorter. Change scratch1 to be length of right string.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003185 __ sub(scratch1, length_delta);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003186 __ bind(&left_shorter);
3187
3188 Register min_length = scratch1;
3189
3190 // If either length is zero, just compare lengths.
Ben Murdoch257744e2011-11-30 15:57:28 +00003191 Label compare_lengths;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003192 __ test(min_length, min_length);
Ben Murdoch257744e2011-11-30 15:57:28 +00003193 __ j(zero, &compare_lengths, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003194
Ben Murdoch257744e2011-11-30 15:57:28 +00003195 // Compare characters.
3196 Label result_not_equal;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003197 GenerateOneByteCharsCompareLoop(masm, left, right, min_length, scratch2,
3198 &result_not_equal, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003199
3200 // Compare lengths - strings up to min-length are equal.
3201 __ bind(&compare_lengths);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003202 __ test(length_delta, length_delta);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003203 Label length_not_equal;
3204 __ j(not_zero, &length_not_equal, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003205
3206 // Result is EQUAL.
3207 STATIC_ASSERT(EQUAL == 0);
3208 STATIC_ASSERT(kSmiTag == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003209 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003210 __ ret(0);
3211
Ben Murdoch257744e2011-11-30 15:57:28 +00003212 Label result_greater;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003213 Label result_less;
3214 __ bind(&length_not_equal);
Ben Murdoch257744e2011-11-30 15:57:28 +00003215 __ j(greater, &result_greater, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003216 __ jmp(&result_less, Label::kNear);
3217 __ bind(&result_not_equal);
3218 __ j(above, &result_greater, Label::kNear);
3219 __ bind(&result_less);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003220
3221 // Result is LESS.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003222 __ Move(eax, Immediate(Smi::FromInt(LESS)));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003223 __ ret(0);
3224
3225 // Result is GREATER.
3226 __ bind(&result_greater);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003227 __ Move(eax, Immediate(Smi::FromInt(GREATER)));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003228 __ ret(0);
3229}
3230
3231
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003232void StringHelper::GenerateOneByteCharsCompareLoop(
3233 MacroAssembler* masm, Register left, Register right, Register length,
3234 Register scratch, Label* chars_not_equal,
Ben Murdoch257744e2011-11-30 15:57:28 +00003235 Label::Distance chars_not_equal_near) {
3236 // Change index to run from -length to -1 by adding length to string
3237 // start. This means that loop ends when index reaches zero, which
3238 // doesn't need an additional compare.
3239 __ SmiUntag(length);
3240 __ lea(left,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003241 FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
Ben Murdoch257744e2011-11-30 15:57:28 +00003242 __ lea(right,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003243 FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
Ben Murdoch257744e2011-11-30 15:57:28 +00003244 __ neg(length);
3245 Register index = length; // index = -length;
3246
3247 // Compare loop.
3248 Label loop;
3249 __ bind(&loop);
3250 __ mov_b(scratch, Operand(left, index, times_1, 0));
3251 __ cmpb(scratch, Operand(right, index, times_1, 0));
3252 __ j(not_equal, chars_not_equal, chars_not_equal_near);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003253 __ inc(index);
Ben Murdoch257744e2011-11-30 15:57:28 +00003254 __ j(not_zero, &loop);
3255}
3256
3257
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003258void StringCompareStub::Generate(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003259 // ----------- S t a t e -------------
3260 // -- edx : left string
3261 // -- eax : right string
3262 // -- esp[0] : return address
3263 // -----------------------------------
3264 __ AssertString(edx);
3265 __ AssertString(eax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003266
Ben Murdoch257744e2011-11-30 15:57:28 +00003267 Label not_same;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003268 __ cmp(edx, eax);
Ben Murdoch257744e2011-11-30 15:57:28 +00003269 __ j(not_equal, &not_same, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003270 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3271 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003272 __ Ret();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003273
3274 __ bind(&not_same);
3275
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003276 // Check that both objects are sequential one-byte strings.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003277 Label runtime;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003278 __ JumpIfNotBothSequentialOneByteStrings(edx, eax, ecx, ebx, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003279
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003280 // Compare flat one-byte strings.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003281 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003282 StringHelper::GenerateCompareFlatOneByteStrings(masm, edx, eax, ecx, ebx,
3283 edi);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003284
3285 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
3286 // tagged as a small integer.
3287 __ bind(&runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003288 __ PopReturnAddressTo(ecx);
3289 __ Push(edx);
3290 __ Push(eax);
3291 __ PushReturnAddressFrom(ecx);
3292 __ TailCallRuntime(Runtime::kStringCompare);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003293}
3294
Ben Murdochb0fe1622011-05-05 13:52:32 +01003295
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003296void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
3297 // ----------- S t a t e -------------
3298 // -- edx : left
3299 // -- eax : right
3300 // -- esp[0] : return address
3301 // -----------------------------------
3302
3303 // Load ecx with the allocation site. We stick an undefined dummy value here
3304 // and replace it with the real allocation site later when we instantiate this
3305 // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
3306 __ mov(ecx, handle(isolate()->heap()->undefined_value()));
3307
3308 // Make sure that we actually patched the allocation site.
3309 if (FLAG_debug_code) {
3310 __ test(ecx, Immediate(kSmiTagMask));
3311 __ Assert(not_equal, kExpectedAllocationSite);
3312 __ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
3313 isolate()->factory()->allocation_site_map());
3314 __ Assert(equal, kExpectedAllocationSite);
3315 }
3316
3317 // Tail call into the stub that handles binary operations with allocation
3318 // sites.
3319 BinaryOpWithAllocationSiteStub stub(isolate(), state());
3320 __ TailCallStub(&stub);
3321}
3322
3323
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003324void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
3325 DCHECK_EQ(CompareICState::BOOLEAN, state());
3326 Label miss;
3327 Label::Distance const miss_distance =
3328 masm->emit_debug_code() ? Label::kFar : Label::kNear;
3329
3330 __ JumpIfSmi(edx, &miss, miss_distance);
3331 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
3332 __ JumpIfSmi(eax, &miss, miss_distance);
3333 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3334 __ JumpIfNotRoot(ecx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
3335 __ JumpIfNotRoot(ebx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
3336 if (op() != Token::EQ_STRICT && is_strong(strength())) {
3337 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion);
3338 } else {
3339 if (!Token::IsEqualityOp(op())) {
3340 __ mov(eax, FieldOperand(eax, Oddball::kToNumberOffset));
3341 __ AssertSmi(eax);
3342 __ mov(edx, FieldOperand(edx, Oddball::kToNumberOffset));
3343 __ AssertSmi(edx);
3344 __ push(eax);
3345 __ mov(eax, edx);
3346 __ pop(edx);
3347 }
3348 __ sub(eax, edx);
3349 __ Ret();
3350 }
3351
3352 __ bind(&miss);
3353 GenerateMiss(masm);
3354}
3355
3356
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003357void CompareICStub::GenerateSmis(MacroAssembler* masm) {
3358 DCHECK(state() == CompareICState::SMI);
Ben Murdoch257744e2011-11-30 15:57:28 +00003359 Label miss;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003360 __ mov(ecx, edx);
3361 __ or_(ecx, eax);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003362 __ JumpIfNotSmi(ecx, &miss, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003363
3364 if (GetCondition() == equal) {
3365 // For equality we do not care about the sign of the result.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003366 __ sub(eax, edx);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003367 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00003368 Label done;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003369 __ sub(edx, eax);
Ben Murdoch257744e2011-11-30 15:57:28 +00003370 __ j(no_overflow, &done, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003371 // Correct sign of result in case of overflow.
3372 __ not_(edx);
3373 __ bind(&done);
3374 __ mov(eax, edx);
3375 }
3376 __ ret(0);
3377
3378 __ bind(&miss);
3379 GenerateMiss(masm);
3380}
3381
3382
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003383void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
3384 DCHECK(state() == CompareICState::NUMBER);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003385
Ben Murdoch257744e2011-11-30 15:57:28 +00003386 Label generic_stub;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003387 Label unordered, maybe_undefined1, maybe_undefined2;
Ben Murdoch257744e2011-11-30 15:57:28 +00003388 Label miss;
Ben Murdochb0fe1622011-05-05 13:52:32 +01003389
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003390 if (left() == CompareICState::SMI) {
3391 __ JumpIfNotSmi(edx, &miss);
3392 }
3393 if (right() == CompareICState::SMI) {
3394 __ JumpIfNotSmi(eax, &miss);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003395 }
3396
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003397 // Load left and right operand.
3398 Label done, left, left_smi, right_smi;
3399 __ JumpIfSmi(eax, &right_smi, Label::kNear);
3400 __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
3401 isolate()->factory()->heap_number_map());
3402 __ j(not_equal, &maybe_undefined1, Label::kNear);
3403 __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
3404 __ jmp(&left, Label::kNear);
3405 __ bind(&right_smi);
3406 __ mov(ecx, eax); // Can't clobber eax because we can still jump away.
3407 __ SmiUntag(ecx);
3408 __ Cvtsi2sd(xmm1, ecx);
3409
3410 __ bind(&left);
3411 __ JumpIfSmi(edx, &left_smi, Label::kNear);
3412 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
3413 isolate()->factory()->heap_number_map());
3414 __ j(not_equal, &maybe_undefined2, Label::kNear);
3415 __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
3416 __ jmp(&done);
3417 __ bind(&left_smi);
3418 __ mov(ecx, edx); // Can't clobber edx because we can still jump away.
3419 __ SmiUntag(ecx);
3420 __ Cvtsi2sd(xmm0, ecx);
3421
3422 __ bind(&done);
3423 // Compare operands.
3424 __ ucomisd(xmm0, xmm1);
3425
3426 // Don't base result on EFLAGS when a NaN is involved.
3427 __ j(parity_even, &unordered, Label::kNear);
3428
3429 // Return a result of -1, 0, or 1, based on EFLAGS.
3430 // Performing mov, because xor would destroy the flag register.
3431 __ mov(eax, 0); // equal
3432 __ mov(ecx, Immediate(Smi::FromInt(1)));
3433 __ cmov(above, eax, ecx);
3434 __ mov(ecx, Immediate(Smi::FromInt(-1)));
3435 __ cmov(below, eax, ecx);
3436 __ ret(0);
3437
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003438 __ bind(&unordered);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003439 __ bind(&generic_stub);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003440 CompareICStub stub(isolate(), op(), strength(), CompareICState::GENERIC,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003441 CompareICState::GENERIC, CompareICState::GENERIC);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003442 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
3443
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003444 __ bind(&maybe_undefined1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003445 if (Token::IsOrderedRelationalCompareOp(op())) {
3446 __ cmp(eax, Immediate(isolate()->factory()->undefined_value()));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003447 __ j(not_equal, &miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003448 __ JumpIfSmi(edx, &unordered);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003449 __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
3450 __ j(not_equal, &maybe_undefined2, Label::kNear);
3451 __ jmp(&unordered);
3452 }
3453
3454 __ bind(&maybe_undefined2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003455 if (Token::IsOrderedRelationalCompareOp(op())) {
3456 __ cmp(edx, Immediate(isolate()->factory()->undefined_value()));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003457 __ j(equal, &unordered);
3458 }
3459
Ben Murdochb0fe1622011-05-05 13:52:32 +01003460 __ bind(&miss);
3461 GenerateMiss(masm);
3462}
3463
3464
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003465void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
3466 DCHECK(state() == CompareICState::INTERNALIZED_STRING);
3467 DCHECK(GetCondition() == equal);
Ben Murdoch257744e2011-11-30 15:57:28 +00003468
3469 // Registers containing left and right operands respectively.
3470 Register left = edx;
3471 Register right = eax;
3472 Register tmp1 = ecx;
3473 Register tmp2 = ebx;
3474
3475 // Check that both operands are heap objects.
3476 Label miss;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003477 __ mov(tmp1, left);
Ben Murdoch257744e2011-11-30 15:57:28 +00003478 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003479 __ and_(tmp1, right);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003480 __ JumpIfSmi(tmp1, &miss, Label::kNear);
Ben Murdoch257744e2011-11-30 15:57:28 +00003481
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003482 // Check that both operands are internalized strings.
Ben Murdoch257744e2011-11-30 15:57:28 +00003483 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
3484 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
3485 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
3486 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003487 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
3488 __ or_(tmp1, tmp2);
3489 __ test(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
3490 __ j(not_zero, &miss, Label::kNear);
Ben Murdoch257744e2011-11-30 15:57:28 +00003491
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003492 // Internalized strings are compared by identity.
Ben Murdoch257744e2011-11-30 15:57:28 +00003493 Label done;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003494 __ cmp(left, right);
Ben Murdoch257744e2011-11-30 15:57:28 +00003495 // Make sure eax is non-zero. At this point input operands are
3496 // guaranteed to be non-zero.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003497 DCHECK(right.is(eax));
Ben Murdoch257744e2011-11-30 15:57:28 +00003498 __ j(not_equal, &done, Label::kNear);
3499 STATIC_ASSERT(EQUAL == 0);
3500 STATIC_ASSERT(kSmiTag == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003501 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
Ben Murdoch257744e2011-11-30 15:57:28 +00003502 __ bind(&done);
3503 __ ret(0);
3504
3505 __ bind(&miss);
3506 GenerateMiss(masm);
3507}
3508
3509
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003510void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
3511 DCHECK(state() == CompareICState::UNIQUE_NAME);
3512 DCHECK(GetCondition() == equal);
3513
3514 // Registers containing left and right operands respectively.
3515 Register left = edx;
3516 Register right = eax;
3517 Register tmp1 = ecx;
3518 Register tmp2 = ebx;
3519
3520 // Check that both operands are heap objects.
3521 Label miss;
3522 __ mov(tmp1, left);
3523 STATIC_ASSERT(kSmiTag == 0);
3524 __ and_(tmp1, right);
3525 __ JumpIfSmi(tmp1, &miss, Label::kNear);
3526
3527 // Check that both operands are unique names. This leaves the instance
3528 // types loaded in tmp1 and tmp2.
3529 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
3530 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
3531 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
3532 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
3533
3534 __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear);
3535 __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear);
3536
3537 // Unique names are compared by identity.
3538 Label done;
3539 __ cmp(left, right);
3540 // Make sure eax is non-zero. At this point input operands are
3541 // guaranteed to be non-zero.
3542 DCHECK(right.is(eax));
3543 __ j(not_equal, &done, Label::kNear);
3544 STATIC_ASSERT(EQUAL == 0);
3545 STATIC_ASSERT(kSmiTag == 0);
3546 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3547 __ bind(&done);
3548 __ ret(0);
3549
3550 __ bind(&miss);
3551 GenerateMiss(masm);
3552}
3553
3554
3555void CompareICStub::GenerateStrings(MacroAssembler* masm) {
3556 DCHECK(state() == CompareICState::STRING);
Ben Murdoch257744e2011-11-30 15:57:28 +00003557 Label miss;
3558
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003559 bool equality = Token::IsEqualityOp(op());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003560
Ben Murdoch257744e2011-11-30 15:57:28 +00003561 // Registers containing left and right operands respectively.
3562 Register left = edx;
3563 Register right = eax;
3564 Register tmp1 = ecx;
3565 Register tmp2 = ebx;
3566 Register tmp3 = edi;
3567
3568 // Check that both operands are heap objects.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003569 __ mov(tmp1, left);
Ben Murdoch257744e2011-11-30 15:57:28 +00003570 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003571 __ and_(tmp1, right);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003572 __ JumpIfSmi(tmp1, &miss);
Ben Murdoch257744e2011-11-30 15:57:28 +00003573
3574 // Check that both operands are strings. This leaves the instance
3575 // types loaded in tmp1 and tmp2.
3576 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
3577 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
3578 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
3579 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
3580 __ mov(tmp3, tmp1);
3581 STATIC_ASSERT(kNotStringTag != 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003582 __ or_(tmp3, tmp2);
Ben Murdoch257744e2011-11-30 15:57:28 +00003583 __ test(tmp3, Immediate(kIsNotStringMask));
3584 __ j(not_zero, &miss);
3585
3586 // Fast check for identical strings.
3587 Label not_same;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003588 __ cmp(left, right);
Ben Murdoch257744e2011-11-30 15:57:28 +00003589 __ j(not_equal, &not_same, Label::kNear);
3590 STATIC_ASSERT(EQUAL == 0);
3591 STATIC_ASSERT(kSmiTag == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003592 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
Ben Murdoch257744e2011-11-30 15:57:28 +00003593 __ ret(0);
3594
3595 // Handle not identical strings.
3596 __ bind(&not_same);
3597
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003598 // Check that both strings are internalized. If they are, we're done
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003599 // because we already know they are not identical. But in the case of
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003600 // non-equality compare, we still need to determine the order. We
3601 // also know they are both strings.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003602 if (equality) {
3603 Label do_compare;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003604 STATIC_ASSERT(kInternalizedTag == 0);
3605 __ or_(tmp1, tmp2);
3606 __ test(tmp1, Immediate(kIsNotInternalizedMask));
3607 __ j(not_zero, &do_compare, Label::kNear);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003608 // Make sure eax is non-zero. At this point input operands are
3609 // guaranteed to be non-zero.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003610 DCHECK(right.is(eax));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003611 __ ret(0);
3612 __ bind(&do_compare);
3613 }
Ben Murdoch257744e2011-11-30 15:57:28 +00003614
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003615 // Check that both strings are sequential one-byte.
Ben Murdoch257744e2011-11-30 15:57:28 +00003616 Label runtime;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003617 __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime);
Ben Murdoch257744e2011-11-30 15:57:28 +00003618
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003619 // Compare flat one byte strings. Returns when done.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003620 if (equality) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003621 StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1,
3622 tmp2);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003623 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003624 StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1,
3625 tmp2, tmp3);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003626 }
Ben Murdoch257744e2011-11-30 15:57:28 +00003627
3628 // Handle more complex cases in runtime.
3629 __ bind(&runtime);
3630 __ pop(tmp1); // Return address.
3631 __ push(left);
3632 __ push(right);
3633 __ push(tmp1);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003634 if (equality) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003635 __ TailCallRuntime(Runtime::kStringEquals);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003636 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003637 __ TailCallRuntime(Runtime::kStringCompare);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003638 }
Ben Murdoch257744e2011-11-30 15:57:28 +00003639
3640 __ bind(&miss);
3641 GenerateMiss(masm);
3642}
3643
3644
// CompareIC fast path for two JSReceiver operands (edx = left, eax = right).
// Performs an identity comparison: returns eax - edx (zero iff same object).
// Falls through to GenerateMiss if either operand is a smi or not a receiver.
void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
  DCHECK_EQ(CompareICState::RECEIVER, state());
  Label miss;
  // AND the two tagged values: if either is a smi the low tag bit of the
  // result is clear, so a single JumpIfSmi rejects both smi cases at once.
  __ mov(ecx, edx);
  __ and_(ecx, eax);
  __ JumpIfSmi(ecx, &miss, Label::kNear);

  // JS receivers occupy the top of the instance-type range, so a single
  // lower-bound check per operand suffices.
  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
  __ j(below, &miss, Label::kNear);
  __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ecx);
  __ j(below, &miss, Label::kNear);

  // Only equality comparisons reach this stub; identical pointers subtract
  // to zero, which is the "equal" result expected by the caller.
  DCHECK_EQ(equal, GetCondition());
  __ sub(eax, edx);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}
3665
3666
// CompareIC fast path when both operands are known to share a specific map
// (known_map_). The map is held via a WeakCell so this stub does not keep
// the map alive. Misses if either operand is a smi or has a different map.
void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) {
  Label miss;
  Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
  // Combined smi check (see GenerateReceivers): if either input is a smi
  // the AND result has a clear tag bit.
  __ mov(ecx, edx);
  __ and_(ecx, eax);
  __ JumpIfSmi(ecx, &miss, Label::kNear);

  // Load the map out of the weak cell (edi is null if the map was
  // collected, which also fails the comparisons below) and require both
  // operands to carry exactly that map.
  __ GetWeakValue(edi, cell);
  __ cmp(edi, FieldOperand(eax, HeapObject::kMapOffset));
  __ j(not_equal, &miss, Label::kNear);
  __ cmp(edi, FieldOperand(edx, HeapObject::kMapOffset));
  __ j(not_equal, &miss, Label::kNear);

  if (Token::IsEqualityOp(op())) {
    // Identity comparison: zero iff the two pointers are equal.
    __ sub(eax, edx);
    __ ret(0);
  } else if (is_strong(strength())) {
    // Strong-mode relational comparison on receivers is an implicit
    // conversion error.
    __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion);
  } else {
    // Relational compare: push (left, right, negated-result hint) under the
    // return address and defer to the generic runtime comparison.
    __ PopReturnAddressTo(ecx);
    __ Push(edx);
    __ Push(eax);
    __ Push(Immediate(Smi::FromInt(NegativeComparisonResult(GetCondition()))));
    __ PushReturnAddressFrom(ecx);
    __ TailCallRuntime(Runtime::kCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}
3697
3698
// CompareIC miss handler: asks the runtime to select a better-specialized
// compare stub for the observed operands (edx, eax), then tail-calls the
// freshly chosen stub with the original operands restored.
void CompareICStub::GenerateMiss(MacroAssembler* masm) {
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(edx);  // Preserve edx and eax.
    __ push(eax);
    __ push(edx);  // And also use them as the arguments.
    __ push(eax);
    __ push(Immediate(Smi::FromInt(op())));
    __ CallRuntime(Runtime::kCompareIC_Miss);
    // Compute the entry point of the rewritten stub. The runtime returns the
    // new Code object in eax; skip its header to get the instruction start.
    __ lea(edi, FieldOperand(eax, Code::kHeaderSize));
    __ pop(eax);
    __ pop(edx);
  }

  // Do a tail call to the rewritten stub.
  __ jmp(edi);
}
3718
3719
// Helper function used to check that the dictionary doesn't contain
// the property. This function may return false negatives, so miss_label
// must always call a backup property check that is complete.
// This function is safe to call if the receiver has fast properties.
// Name must be a unique name and receiver must be a heap object.
//
// Inlines the first kInlinedProbes probes of the dictionary (the name and
// its hash are compile-time constants here), then falls back to the
// generic NEGATIVE_LOOKUP stub for the remaining probes. |r0| is clobbered.
void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register properties,
                                                      Handle<Name> name,
                                                      Register r0) {
  DCHECK(name->IsUniqueName());

  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the hole value).
  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    Register index = r0;
    // Capacity is smi 2^n.
    __ mov(index, FieldOperand(properties, kCapacityOffset));
    __ dec(index);
    // capacity - 1 is the mask; hash and probe offset are baked in as an
    // immediate since |name| is known at code-generation time.
    __ and_(index,
            Immediate(Smi::FromInt(name->Hash() +
                                   NameDictionary::GetProbeOffset(i))));

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ lea(index, Operand(index, index, times_2, 0));  // index *= 3.
    Register entity_name = r0;
    // Having undefined at this place means the name is not contained.
    STATIC_ASSERT(kSmiTagSize == 1);
    // index is a smi, so times_half_pointer_size yields a byte offset.
    __ mov(entity_name, Operand(properties, index, times_half_pointer_size,
                                kElementsStartOffset - kHeapObjectTag));
    __ cmp(entity_name, masm->isolate()->factory()->undefined_value());
    __ j(equal, done);

    // Stop if found the property.
    __ cmp(entity_name, Handle<Name>(name));
    __ j(equal, miss);

    Label good;
    // Check for the hole and skip.
    __ cmp(entity_name, masm->isolate()->factory()->the_hole_value());
    __ j(equal, &good, Label::kNear);

    // Check if the entry name is not a unique name. Non-unique names might
    // compare equal to |name| without being pointer-identical, so bail out.
    __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
    __ JumpIfNotUniqueNameInstanceType(
        FieldOperand(entity_name, Map::kInstanceTypeOffset), miss);
    __ bind(&good);
  }

  // Inlined probes were inconclusive: run the full out-of-line lookup.
  // The stub returns a nonzero r0 if the name was found (i.e. a miss for a
  // negative lookup).
  NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
                                NEGATIVE_LOOKUP);
  __ push(Immediate(Handle<Object>(name)));
  __ push(Immediate(name->Hash()));
  __ CallStub(&stub);
  __ test(r0, r0);
  __ j(not_zero, miss);
  __ jmp(done);
}
3784
3785
// Probe the name dictionary in the |elements| register. Jump to the
// |done| label if a property with the given name is found leaving the
// index into the dictionary in |r0|. Jump to the |miss| label
// otherwise.
//
// Unlike GenerateNegativeLookup, |name| is a register here (hash computed at
// run time). Inlines kInlinedProbes probes, then falls back to the generic
// POSITIVE_LOOKUP stub. |r0| and |r1| are clobbered.
void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register elements,
                                                      Register name,
                                                      Register r0,
                                                      Register r1) {
  DCHECK(!elements.is(r0));
  DCHECK(!elements.is(r1));
  DCHECK(!name.is(r0));
  DCHECK(!name.is(r1));

  __ AssertName(name);

  // r1 = capacity - 1, used as the probe mask.
  __ mov(r1, FieldOperand(elements, kCapacityOffset));
  __ shr(r1, kSmiTagSize);  // convert smi to int
  __ dec(r1);

  // Generate an unrolled loop that performs a few probes before
  // giving up. Measurements done on Gmail indicate that 2 probes
  // cover ~93% of loads from dictionaries.
  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
    __ shr(r0, Name::kHashShift);
    if (i > 0) {
      __ add(r0, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ and_(r0, r1);

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ lea(r0, Operand(r0, r0, times_2, 0));  // r0 = r0 * 3

    // Check if the key is identical to the name.
    __ cmp(name, Operand(elements,
                         r0,
                         times_4,
                         kElementsStartOffset - kHeapObjectTag));
    __ j(equal, done);
  }

  // Inlined probes missed: run the full out-of-line lookup. The stub
  // reports success in r1 (nonzero) and leaves the entry index in r0.
  NameDictionaryLookupStub stub(masm->isolate(), elements, r1, r0,
                                POSITIVE_LOOKUP);
  __ push(name);
  __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
  __ shr(r0, Name::kHashShift);
  __ push(r0);
  __ CallStub(&stub);

  __ test(r1, r1);
  __ j(zero, miss);
  __ jmp(done);
}
3844
3845
// Out-of-line dictionary probe used as the fallback after the inlined probes
// in Generate{Negative,Positive}Lookup. Probes slots kInlinedProbes through
// kTotalProbes - 1.
void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
  // This stub overrides SometimesSetsUpAFrame() to return false. That means
  // we cannot call anything that could cause a GC from this stub.
  // Stack frame on entry:
  //  esp[0 * kPointerSize]: return address.
  //  esp[1 * kPointerSize]: key's hash.
  //  esp[2 * kPointerSize]: key.
  // Registers:
  //  dictionary_: NameDictionary to probe.
  //  result_: used as scratch.
  //  index_: will hold an index of entry if lookup is successful.
  //          might alias with result_.
  // Returns:
  //  result_ is zero if lookup failed, non zero otherwise.

  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;

  Register scratch = result();

  // Push (capacity - 1) as the untagged probe mask; note this shifts all
  // incoming stack-slot offsets below by one word (hash is now at
  // esp[2 * kPointerSize], key at esp[3 * kPointerSize]).
  __ mov(scratch, FieldOperand(dictionary(), kCapacityOffset));
  __ dec(scratch);
  __ SmiUntag(scratch);
  __ push(scratch);

  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the null value).
  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ mov(scratch, Operand(esp, 2 * kPointerSize));
    if (i > 0) {
      __ add(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ and_(scratch, Operand(esp, 0));

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ lea(index(), Operand(scratch, scratch, times_2, 0));  // index *= 3.

    // Having undefined at this place means the name is not contained.
    STATIC_ASSERT(kSmiTagSize == 1);
    __ mov(scratch, Operand(dictionary(), index(), times_pointer_size,
                            kElementsStartOffset - kHeapObjectTag));
    __ cmp(scratch, isolate()->factory()->undefined_value());
    __ j(equal, &not_in_dictionary);

    // Stop if found the property.
    __ cmp(scratch, Operand(esp, 3 * kPointerSize));
    __ j(equal, &in_dictionary);

    if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
      // If we hit a key that is not a unique name during negative
      // lookup we have to bailout as this key might be equal to the
      // key we are looking for.

      // Check if the entry name is not a unique name.
      __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
      __ JumpIfNotUniqueNameInstanceType(
          FieldOperand(scratch, Map::kInstanceTypeOffset),
          &maybe_in_dictionary);
    }
  }

  __ bind(&maybe_in_dictionary);
  // If we are doing negative lookup then probing failure should be
  // treated as a lookup success. For positive lookup probing failure
  // should be treated as lookup failure.
  if (mode() == POSITIVE_LOOKUP) {
    __ mov(result(), Immediate(0));
    __ Drop(1);                // Discard the pushed probe mask.
    __ ret(2 * kPointerSize);  // Pop hash and key as well.
  }

  __ bind(&in_dictionary);
  __ mov(result(), Immediate(1));
  __ Drop(1);
  __ ret(2 * kPointerSize);

  __ bind(&not_in_dictionary);
  __ mov(result(), Immediate(0));
  __ Drop(1);
  __ ret(2 * kPointerSize);
}
3931
3932
// Pre-generates both flavors (with and without FP-register saving) of the
// store-buffer overflow stub so they exist before GC can require them.
void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
    Isolate* isolate) {
  StoreBufferOverflowStub stub(isolate, kDontSaveFPRegs);
  stub.GetCode();
  StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
  stub2.GetCode();
}
3940
3941
// Takes the input in 3 registers: address_ value_ and object_.  A pointer to
// the value has just been written into the object, now this stub makes sure
// we keep the GC informed.  The word in the object where the value has been
// written is in the address register.
//
// NOTE(review): this stub self-patches. The first bytes are toggled between
// nops and jumps by the incremental marker (see the set_byte_at calls below),
// so the exact size/placement of the two leading jump instructions is
// load-bearing — do not reorder or change them.
void RecordWriteStub::Generate(MacroAssembler* masm) {
  Label skip_to_incremental_noncompacting;
  Label skip_to_incremental_compacting;

  // The first two instructions are generated with labels so as to get the
  // offset fixed up correctly by the bind(Label*) call.  We patch it back and
  // forth between a compare instructions (a nop in this position) and the
  // real branch when we start and stop incremental heap marking.
  __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
  __ jmp(&skip_to_incremental_compacting, Label::kFar);

  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&skip_to_incremental_noncompacting);
  GenerateIncremental(masm, INCREMENTAL);

  __ bind(&skip_to_incremental_compacting);
  GenerateIncremental(masm, INCREMENTAL_COMPACTION);

  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
  // Will be checked in IncrementalMarking::ActivateGeneratedStub.
  // Overwrite the emitted jumps with nops so the stub starts in the
  // store-buffer-only configuration.
  masm->set_byte_at(0, kTwoByteNopInstruction);
  masm->set_byte_at(2, kFiveByteNopInstruction);
}
3975
3976
// Record-write path taken while incremental marking is active. Notifies the
// incremental marker about the written value and, when requested, updates
// the remembered set for old-to-new pointers.
void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
  regs_.Save(masm);

  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    Label dont_need_remembered_set;

    // Load the just-written value and skip remembered-set work unless it is
    // a new-space object referenced from a page that is not scanned on
    // scavenge.
    __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
    __ JumpIfNotInNewSpace(regs_.scratch0(),  // Value.
                           regs_.scratch0(),
                           &dont_need_remembered_set);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch0(),
                     1 << MemoryChunk::SCAN_ON_SCAVENGE,
                     not_zero,
                     &dont_need_remembered_set);

    // First notify the incremental marker if necessary, then update the
    // remembered set.
    CheckNeedsToInformIncrementalMarker(
        masm,
        kUpdateRememberedSetOnNoNeedToInformIncrementalMarker,
        mode);
    InformIncrementalMarker(masm);
    regs_.Restore(masm);
    // RememberedSetHelper returns to the caller (kReturnAtEnd).
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);

    __ bind(&dont_need_remembered_set);
  }

  CheckNeedsToInformIncrementalMarker(
      masm,
      kReturnOnNoNeedToInformIncrementalMarker,
      mode);
  InformIncrementalMarker(masm);
  regs_.Restore(masm);
  __ ret(0);
}
4016
4017
// Calls the C++ incremental-marking record-write function with
// (object, slot address, isolate), preserving caller-saved registers
// (and FP registers when the stub was configured to save them).
void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
  int argument_count = 3;
  __ PrepareCallCFunction(argument_count, regs_.scratch0());
  __ mov(Operand(esp, 0 * kPointerSize), regs_.object());
  __ mov(Operand(esp, 1 * kPointerSize), regs_.address());  // Slot.
  __ mov(Operand(esp, 2 * kPointerSize),
         Immediate(ExternalReference::isolate_address(isolate())));

  // The C function must not trigger GC (registers holding tagged values are
  // only saved on the stack, not reported to the GC).
  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallCFunction(
      ExternalReference::incremental_marking_record_write_function(isolate()),
      argument_count);

  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
}
4034
4035
// Decides whether the incremental marker must be informed about this write.
// Falls through when it must; otherwise either returns or updates the
// remembered set and returns, depending on |on_no_need|.
void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    MacroAssembler* masm,
    OnNoNeedToInformIncrementalMarker on_no_need,
    Mode mode) {
  Label object_is_black, need_incremental, need_incremental_pop_object;

  // Decrement the page's write-barrier counter; when it drops below zero we
  // take the slow path regardless, to bound the time between marker visits.
  __ mov(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
  __ and_(regs_.scratch0(), regs_.object());
  __ mov(regs_.scratch1(),
         Operand(regs_.scratch0(),
                 MemoryChunk::kWriteBarrierCounterOffset));
  __ sub(regs_.scratch1(), Immediate(1));
  __ mov(Operand(regs_.scratch0(),
                 MemoryChunk::kWriteBarrierCounterOffset),
         regs_.scratch1());
  __ j(negative, &need_incremental);

  // Let's look at the color of the object:  If it is not black we don't have
  // to inform the incremental marker.
  __ JumpIfBlack(regs_.object(),
                 regs_.scratch0(),
                 regs_.scratch1(),
                 &object_is_black,
                 Label::kNear);

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&object_is_black);

  // Get the value from the slot.
  __ mov(regs_.scratch0(), Operand(regs_.address(), 0));

  if (mode == INCREMENTAL_COMPACTION) {
    Label ensure_not_white;

    // A value on an evacuation-candidate page written into an object whose
    // slots are being recorded must go to the slow path so the slot gets
    // recorded for pointer updating.
    __ CheckPageFlag(regs_.scratch0(),  // Contains value.
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kEvacuationCandidateMask,
                     zero,
                     &ensure_not_white,
                     Label::kNear);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kSkipEvacuationSlotsRecordingMask,
                     not_zero,
                     &ensure_not_white,
                     Label::kNear);

    __ jmp(&need_incremental);

    __ bind(&ensure_not_white);
  }

  // We need an extra register for this, so we push the object register
  // temporarily.
  __ push(regs_.object());
  __ JumpIfWhite(regs_.scratch0(),  // The value.
                 regs_.scratch1(),  // Scratch.
                 regs_.object(),    // Scratch.
                 &need_incremental_pop_object, Label::kNear);
  __ pop(regs_.object());

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&need_incremental_pop_object);
  __ pop(regs_.object());

  __ bind(&need_incremental);

  // Fall through when we need to inform the incremental marker.
}
4120
4121
// Trampoline used when a stub deopts: calls into the runtime via CEntryStub,
// then unwinds the stub-failure frame (including the caller's stack
// arguments, whose count was stored in the frame) and returns to the IC
// miss continuation left on the stack.
void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(isolate(), 1, kSaveFPRegs);
  __ call(ces.GetCode(), RelocInfo::CODE_TARGET);
  int parameter_count_offset =
      StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
  __ mov(ebx, MemOperand(ebp, parameter_count_offset));
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  __ pop(ecx);
  // In JS_FUNCTION_STUB_MODE one extra slot (the receiver) must be dropped.
  int additional_offset =
      function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0;
  __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset));
  __ jmp(ecx);  // Return to IC Miss stub, continuation still on stack.
}
4135
4136
// Loads the type feedback vector into its descriptor register, then emits
// the full LoadIC stub body inline.
void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
  LoadICStub stub(isolate(), state());
  stub.GenerateForTrampoline(masm);
}
4142
4143
// Keyed-load variant of the trampoline: materializes the feedback vector and
// emits the KeyedLoadIC stub body inline.
void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
  KeyedLoadICStub stub(isolate(), state());
  stub.GenerateForTrampoline(masm);
}
4149
4150
// Walks a feedback FixedArray of {weak-cell(map), handler} pairs looking for
// the receiver's map; on a hit, tail-jumps to the matching handler, otherwise
// jumps to |miss|. |receiver| and |vector| (and |key| on the polymorphic
// path) are spilled to the stack because they double as scratch registers;
// every exit restores them first.
static void HandleArrayCases(MacroAssembler* masm, Register receiver,
                             Register key, Register vector, Register slot,
                             Register feedback, bool is_polymorphic,
                             Label* miss) {
  // feedback initially contains the feedback array
  Label next, next_loop, prepare_next;
  Label load_smi_map, compare_map;
  Label start_polymorphic;

  __ push(receiver);
  __ push(vector);

  Register receiver_map = receiver;
  Register cached_map = vector;

  // Receiver might not be a heap object.
  __ JumpIfSmi(receiver, &load_smi_map);
  __ mov(receiver_map, FieldOperand(receiver, 0));
  __ bind(&compare_map);
  __ mov(cached_map, FieldOperand(feedback, FixedArray::OffsetOfElementAt(0)));

  // A named keyed load might have a 2 element array, all other cases can count
  // on an array with at least 2 {map, handler} pairs, so they can go right
  // into polymorphic array handling.
  __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  __ j(not_equal, is_polymorphic ? &start_polymorphic : &next);

  // found, now call handler.
  Register handler = feedback;
  __ mov(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1)));
  __ pop(vector);
  __ pop(receiver);
  __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
  __ jmp(handler);

  if (!is_polymorphic) {
    // Named keyed load: a 2-element array means monomorphic-only feedback,
    // so a first-pair mismatch is a miss rather than a polymorphic scan.
    __ bind(&next);
    __ cmp(FieldOperand(feedback, FixedArray::kLengthOffset),
           Immediate(Smi::FromInt(2)));
    __ j(not_equal, &start_polymorphic);
    __ pop(vector);
    __ pop(receiver);
    __ jmp(miss);
  }

  // Polymorphic, we have to loop from 2 to N
  __ bind(&start_polymorphic);
  __ push(key);
  Register counter = key;
  __ mov(counter, Immediate(Smi::FromInt(2)));
  __ bind(&next_loop);
  // counter is a smi, hence times_half_pointer_size for element addressing.
  __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
                                  FixedArray::kHeaderSize));
  __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  __ j(not_equal, &prepare_next);
  __ mov(handler, FieldOperand(feedback, counter, times_half_pointer_size,
                               FixedArray::kHeaderSize + kPointerSize));
  __ pop(key);
  __ pop(vector);
  __ pop(receiver);
  __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
  __ jmp(handler);

  __ bind(&prepare_next);
  __ add(counter, Immediate(Smi::FromInt(2)));
  __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
  __ j(less, &next_loop);

  // We exhausted our array of map handler pairs.
  __ pop(key);
  __ pop(vector);
  __ pop(receiver);
  __ jmp(miss);

  // Smi receivers are dispatched on the heap-number map.
  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}
4229
4230
// Monomorphic IC dispatch: |weak_cell| holds the single cached map; if the
// receiver's map matches, tail-jumps to the handler stored in the next
// feedback-vector slot, otherwise jumps to |miss|. |weak_cell| is reused as
// scratch for both the map and the handler.
static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver,
                                  Register key, Register vector, Register slot,
                                  Register weak_cell, Label* miss) {
  // feedback initially contains the feedback array
  Label compare_smi_map;

  // Move the weak map into the weak_cell register.
  Register ic_map = weak_cell;
  __ mov(ic_map, FieldOperand(weak_cell, WeakCell::kValueOffset));

  // Receiver might not be a heap object.
  __ JumpIfSmi(receiver, &compare_smi_map);
  __ cmp(ic_map, FieldOperand(receiver, 0));
  __ j(not_equal, miss);
  Register handler = weak_cell;
  // The handler lives one slot after the weak cell in the feedback vector
  // (slot is a smi, hence times_half_pointer_size).
  __ mov(handler, FieldOperand(vector, slot, times_half_pointer_size,
                               FixedArray::kHeaderSize + kPointerSize));
  __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
  __ jmp(handler);

  // In microbenchmarks, it made sense to unroll this code so that the call to
  // the handler is duplicated for a HeapObject receiver and a Smi receiver.
  __ bind(&compare_smi_map);
  __ CompareRoot(ic_map, Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, miss);
  __ mov(handler, FieldOperand(vector, slot, times_half_pointer_size,
                               FixedArray::kHeaderSize + kPointerSize));
  __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
  __ jmp(handler);
}
4261
4262
// Non-trampoline entry: the feedback vector is already in its register.
void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }
4264
4265
4266void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
4267 GenerateImpl(masm, true);
4268}
4269
4270
// Vector-based LoadIC dispatch: reads the feedback slot and handles, in
// order, the monomorphic (weak cell), polymorphic (fixed array), and
// megamorphic (stub-cache probe) cases, falling back to the LoadIC miss
// runtime on anything else.
void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // edx
  Register name = LoadWithVectorDescriptor::NameRegister();          // ecx
  Register vector = LoadWithVectorDescriptor::VectorRegister();      // ebx
  Register slot = LoadWithVectorDescriptor::SlotRegister();          // eax
  Register scratch = edi;
  // Load the feedback entry for this slot (slot is a smi).
  __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
                               FixedArray::kHeaderSize));

  // Is it a weak cell?
  Label try_array;
  Label not_array, smi_key, key_okay, miss;
  __ CompareRoot(FieldOperand(scratch, 0), Heap::kWeakCellMapRootIndex);
  __ j(not_equal, &try_array);
  HandleMonomorphicCase(masm, receiver, name, vector, slot, scratch, &miss);

  // Is it a fixed array?
  __ bind(&try_array);
  __ CompareRoot(FieldOperand(scratch, 0), Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &not_array);
  HandleArrayCases(masm, receiver, name, vector, slot, scratch, true, &miss);

  __ bind(&not_array);
  // Only the megamorphic sentinel is handled here; anything else misses.
  __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex);
  __ j(not_equal, &miss);
  // Free up slot/vector as scratch for the stub-cache probe; restored after.
  __ push(slot);
  __ push(vector);
  Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags(
      Code::ComputeHandlerFlags(Code::LOAD_IC));
  masm->isolate()->stub_cache()->GenerateProbe(masm, Code::LOAD_IC, code_flags,
                                               receiver, name, vector, scratch);
  __ pop(vector);
  __ pop(slot);

  __ bind(&miss);
  LoadIC::GenerateMiss(masm);
}
4308
4309
// Non-trampoline entry: the feedback vector is already in its register.
void KeyedLoadICStub::Generate(MacroAssembler* masm) {
  GenerateImpl(masm, false);
}
4313
4314
// Trampoline entry: emitted inline after the vector has been loaded.
void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}
4318
4319
// Vector-based KeyedLoadIC dispatch: monomorphic weak cell, polymorphic
// element-handler array, megamorphic stub, and finally the named-key case
// where the feedback holds the key itself with a handler array in the next
// slot. Anything else goes to the KeyedLoadIC miss runtime.
void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // edx
  Register key = LoadWithVectorDescriptor::NameRegister();           // ecx
  Register vector = LoadWithVectorDescriptor::VectorRegister();      // ebx
  Register slot = LoadWithVectorDescriptor::SlotRegister();          // eax
  Register feedback = edi;
  // Load the feedback entry for this slot (slot is a smi).
  __ mov(feedback, FieldOperand(vector, slot, times_half_pointer_size,
                                FixedArray::kHeaderSize));
  // Is it a weak cell?
  Label try_array;
  Label not_array, smi_key, key_okay, miss;
  __ CompareRoot(FieldOperand(feedback, 0), Heap::kWeakCellMapRootIndex);
  __ j(not_equal, &try_array);
  HandleMonomorphicCase(masm, receiver, key, vector, slot, feedback, &miss);

  __ bind(&try_array);
  // Is it a fixed array?
  __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &not_array);

  // We have a polymorphic element handler.
  Label polymorphic, try_poly_name;
  __ bind(&polymorphic);
  HandleArrayCases(masm, receiver, key, vector, slot, feedback, true, &miss);

  __ bind(&not_array);
  // Is it generic?
  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
  __ j(not_equal, &try_poly_name);
  Handle<Code> megamorphic_stub =
      KeyedLoadIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
  __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);

  __ bind(&try_poly_name);
  // We might have a name in feedback, and a fixed array in the next slot.
  __ cmp(key, feedback);
  __ j(not_equal, &miss);
  // If the name comparison succeeded, we know we have a fixed array with
  // at least one map/handler pair.
  __ mov(feedback, FieldOperand(vector, slot, times_half_pointer_size,
                                FixedArray::kHeaderSize + kPointerSize));
  // is_polymorphic == false: a 2-element array here is monomorphic feedback.
  HandleArrayCases(masm, receiver, key, vector, slot, feedback, false, &miss);

  __ bind(&miss);
  KeyedLoadIC::GenerateMiss(masm);
}
4366
4367
// Loads the type feedback vector, then emits the VectorStoreIC stub inline.
void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
  VectorStoreICStub stub(isolate(), state());
  stub.GenerateForTrampoline(masm);
}
4373
4374
4375void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
4376 __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
4377 VectorKeyedStoreICStub stub(isolate(), state());
4378 stub.GenerateForTrampoline(masm);
4379}
4380
4381
void VectorStoreICStub::Generate(MacroAssembler* masm) {
  // Direct (non-trampoline) entry: the feedback vector register is already
  // populated by the caller.
  GenerateImpl(masm, false);
}
4385
4386
void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
  // Entry used by the trampoline stub, which loaded the feedback vector
  // itself before jumping here.
  GenerateImpl(masm, true);
}
4390
4391
// value is on the stack already.
// Walks a FixedArray of {WeakCell(map), handler} pairs and tail-jumps to the
// handler whose map matches the receiver's map.  |feedback| initially holds
// the feedback array and is reused as the handler register.  If
// |is_polymorphic| is false the array may have only one pair (named keyed
// store case).  Falls through to |miss| when no map matches.
static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register receiver,
                                       Register key, Register vector,
                                       Register slot, Register feedback,
                                       bool is_polymorphic, Label* miss) {
  // feedback initially contains the feedback array
  Label next, next_loop, prepare_next;
  Label load_smi_map, compare_map;
  Label start_polymorphic;
  Label pop_and_miss;
  // ia32 is register-starved here, so the handler address is parked in a
  // process-wide "virtual register" before the registers are restored.
  ExternalReference virtual_register =
      ExternalReference::virtual_handler_register(masm->isolate());

  // Save receiver and vector so their registers can serve as scratch.
  __ push(receiver);
  __ push(vector);

  Register receiver_map = receiver;
  Register cached_map = vector;

  // Receiver might not be a heap object.
  __ JumpIfSmi(receiver, &load_smi_map);
  __ mov(receiver_map, FieldOperand(receiver, 0));
  __ bind(&compare_map);
  __ mov(cached_map, FieldOperand(feedback, FixedArray::OffsetOfElementAt(0)));

  // A named keyed store might have a 2 element array, all other cases can count
  // on an array with at least 2 {map, handler} pairs, so they can go right
  // into polymorphic array handling.
  __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  __ j(not_equal, &start_polymorphic);

  // found, now call handler.
  Register handler = feedback;
  DCHECK(handler.is(VectorStoreICDescriptor::ValueRegister()));
  __ mov(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1)));
  __ pop(vector);
  __ pop(receiver);
  // Skip the Code header to get the entry address.
  __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
  __ mov(Operand::StaticVariable(virtual_register), handler);
  __ pop(handler);  // Pop "value".
  __ jmp(Operand::StaticVariable(virtual_register));

  // Polymorphic, we have to loop from 2 to N
  __ bind(&start_polymorphic);
  __ push(key);
  Register counter = key;
  // Counter is a Smi index into the feedback array; each pair is 2 elements.
  __ mov(counter, Immediate(Smi::FromInt(2)));

  if (!is_polymorphic) {
    // If is_polymorphic is false, we may only have a two element array.
    // Check against length now in that case.
    __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
    __ j(greater_equal, &pop_and_miss);
  }

  __ bind(&next_loop);
  __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
                                  FixedArray::kHeaderSize));
  __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  __ j(not_equal, &prepare_next);
  __ mov(handler, FieldOperand(feedback, counter, times_half_pointer_size,
                               FixedArray::kHeaderSize + kPointerSize));
  __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
  // Restore saved registers, stash the handler, reload the value and jump.
  __ pop(key);
  __ pop(vector);
  __ pop(receiver);
  __ mov(Operand::StaticVariable(virtual_register), handler);
  __ pop(handler);  // Pop "value".
  __ jmp(Operand::StaticVariable(virtual_register));

  __ bind(&prepare_next);
  // Advance to the next {map, handler} pair.
  __ add(counter, Immediate(Smi::FromInt(2)));
  __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
  __ j(less, &next_loop);

  // We exhausted our array of map handler pairs.
  __ bind(&pop_and_miss);
  __ pop(key);
  __ pop(vector);
  __ pop(receiver);
  __ jmp(miss);

  // Smi receivers are matched via the heap-number map entry in the feedback.
  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}
4478
4479
// Monomorphic store: the feedback slot holds a WeakCell wrapping the expected
// map.  If the receiver's map matches, tail-jump to the handler stored in the
// next feedback slot; otherwise fall through to |miss|.  |weak_cell| holds the
// WeakCell on entry and is reused as scratch/handler register.
static void HandleMonomorphicStoreCase(MacroAssembler* masm, Register receiver,
                                       Register key, Register vector,
                                       Register slot, Register weak_cell,
                                       Label* miss) {
  // The store ic value is on the stack.
  DCHECK(weak_cell.is(VectorStoreICDescriptor::ValueRegister()));
  // Handler address is parked in a process-wide "virtual register" so the
  // value register can be restored before jumping.
  ExternalReference virtual_register =
      ExternalReference::virtual_handler_register(masm->isolate());

  // feedback initially contains the feedback array
  Label compare_smi_map;

  // Move the weak map into the weak_cell register.
  Register ic_map = weak_cell;
  __ mov(ic_map, FieldOperand(weak_cell, WeakCell::kValueOffset));

  // Receiver might not be a heap object.
  __ JumpIfSmi(receiver, &compare_smi_map);
  __ cmp(ic_map, FieldOperand(receiver, 0));
  __ j(not_equal, miss);
  // Load the handler from the slot following the WeakCell.
  __ mov(weak_cell, FieldOperand(vector, slot, times_half_pointer_size,
                                 FixedArray::kHeaderSize + kPointerSize));
  __ lea(weak_cell, FieldOperand(weak_cell, Code::kHeaderSize));
  // Put the store ic value back in it's register.
  __ mov(Operand::StaticVariable(virtual_register), weak_cell);
  __ pop(weak_cell);  // Pop "value".
  // jump to the handler.
  __ jmp(Operand::StaticVariable(virtual_register));

  // In microbenchmarks, it made sense to unroll this code so that the call to
  // the handler is duplicated for a HeapObject receiver and a Smi receiver.
  // A Smi receiver matches only if the cached map is the heap-number map.
  __ bind(&compare_smi_map);
  __ CompareRoot(ic_map, Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, miss);
  __ mov(weak_cell, FieldOperand(vector, slot, times_half_pointer_size,
                                 FixedArray::kHeaderSize + kPointerSize));
  __ lea(weak_cell, FieldOperand(weak_cell, Code::kHeaderSize));
  __ mov(Operand::StaticVariable(virtual_register), weak_cell);
  __ pop(weak_cell);  // Pop "value".
  // jump to the handler.
  __ jmp(Operand::StaticVariable(virtual_register));
}
4522
4523
4524void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
4525 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // edx
4526 Register key = VectorStoreICDescriptor::NameRegister(); // ecx
4527 Register value = VectorStoreICDescriptor::ValueRegister(); // eax
4528 Register vector = VectorStoreICDescriptor::VectorRegister(); // ebx
4529 Register slot = VectorStoreICDescriptor::SlotRegister(); // edi
4530 Label miss;
4531
4532 __ push(value);
4533
4534 Register scratch = value;
4535 __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
4536 FixedArray::kHeaderSize));
4537
4538 // Is it a weak cell?
4539 Label try_array;
4540 Label not_array, smi_key, key_okay;
4541 __ CompareRoot(FieldOperand(scratch, 0), Heap::kWeakCellMapRootIndex);
4542 __ j(not_equal, &try_array);
4543 HandleMonomorphicStoreCase(masm, receiver, key, vector, slot, scratch, &miss);
4544
4545 // Is it a fixed array?
4546 __ bind(&try_array);
4547 __ CompareRoot(FieldOperand(scratch, 0), Heap::kFixedArrayMapRootIndex);
4548 __ j(not_equal, &not_array);
4549 HandlePolymorphicStoreCase(masm, receiver, key, vector, slot, scratch, true,
4550 &miss);
4551
4552 __ bind(&not_array);
4553 __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex);
4554 __ j(not_equal, &miss);
4555
4556 __ pop(value);
4557 __ push(slot);
4558 __ push(vector);
4559 Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags(
4560 Code::ComputeHandlerFlags(Code::STORE_IC));
4561 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::STORE_IC, code_flags,
4562 receiver, key, slot, no_reg);
4563 __ pop(vector);
4564 __ pop(slot);
4565 Label no_pop_miss;
4566 __ jmp(&no_pop_miss);
4567
4568 __ bind(&miss);
4569 __ pop(value);
4570 __ bind(&no_pop_miss);
4571 StoreIC::GenerateMiss(masm);
4572}
4573
4574
void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) {
  // Direct (non-trampoline) entry: the feedback vector register is already
  // populated by the caller.
  GenerateImpl(masm, false);
}
4578
4579
void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
  // Entry used by the trampoline stub, which loaded the feedback vector
  // itself before jumping here.
  GenerateImpl(masm, true);
}
4583
4584
// Polymorphic keyed store: the feedback array holds {WeakCell(map),
// transition-map-or-undefined, handler} triples.  A matching map with an
// undefined transition entry tail-jumps straight to the handler; a matching
// map with a transition WeakCell loads the transition map and jumps to a
// transitioning handler (slot passed via a virtual register).  Falls through
// to |miss| when nothing matches or a transition cell was cleared.
static void HandlePolymorphicKeyedStoreCase(MacroAssembler* masm,
                                            Register receiver, Register key,
                                            Register vector, Register slot,
                                            Register feedback, Label* miss) {
  // feedback initially contains the feedback array
  Label next, next_loop, prepare_next;
  Label load_smi_map, compare_map;
  Label transition_call;
  Label pop_and_miss;
  // ia32 is register-starved, so handler and slot travel through
  // process-wide "virtual registers" across the register restores.
  ExternalReference virtual_register =
      ExternalReference::virtual_handler_register(masm->isolate());
  ExternalReference virtual_slot =
      ExternalReference::virtual_slot_register(masm->isolate());

  __ push(receiver);
  __ push(vector);

  Register receiver_map = receiver;
  Register cached_map = vector;
  Register value = StoreDescriptor::ValueRegister();

  // Receiver might not be a heap object.
  __ JumpIfSmi(receiver, &load_smi_map);
  __ mov(receiver_map, FieldOperand(receiver, 0));
  __ bind(&compare_map);

  // Polymorphic, we have to loop from 0 to N - 1
  __ push(key);
  // Current stack layout:
  // - esp[0]    -- key
  // - esp[4]    -- vector
  // - esp[8]    -- receiver
  // - esp[12]   -- value
  // - esp[16]   -- return address
  //
  // Required stack layout for handler call:
  // - esp[0]    -- return address
  // - receiver, key, value, vector, slot in registers.
  // - handler in virtual register.
  Register counter = key;
  __ mov(counter, Immediate(Smi::FromInt(0)));
  __ bind(&next_loop);
  __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
                                  FixedArray::kHeaderSize));
  __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  __ j(not_equal, &prepare_next);
  // Map matched; check the transition entry (second element of the triple).
  __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
                                  FixedArray::kHeaderSize + kPointerSize));
  __ CompareRoot(cached_map, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, &transition_call);
  // No transition: load the handler (third element) and dispatch.
  __ mov(feedback, FieldOperand(feedback, counter, times_half_pointer_size,
                                FixedArray::kHeaderSize + 2 * kPointerSize));
  __ pop(key);
  __ pop(vector);
  __ pop(receiver);
  __ lea(feedback, FieldOperand(feedback, Code::kHeaderSize));
  __ mov(Operand::StaticVariable(virtual_register), feedback);
  __ pop(value);
  __ jmp(Operand::StaticVariable(virtual_register));

  __ bind(&transition_call);
  // Current stack layout:
  // - esp[0]    -- key
  // - esp[4]    -- vector
  // - esp[8]    -- receiver
  // - esp[12]   -- value
  // - esp[16]   -- return address
  //
  // Required stack layout for handler call:
  // - esp[0]    -- return address
  // - receiver, key, value, map, vector in registers.
  // - handler and slot in virtual registers.
  __ mov(Operand::StaticVariable(virtual_slot), slot);
  __ mov(feedback, FieldOperand(feedback, counter, times_half_pointer_size,
                                FixedArray::kHeaderSize + 2 * kPointerSize));
  __ lea(feedback, FieldOperand(feedback, Code::kHeaderSize));
  __ mov(Operand::StaticVariable(virtual_register), feedback);

  __ mov(cached_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  // The weak cell may have been cleared.
  __ JumpIfSmi(cached_map, &pop_and_miss);
  DCHECK(!cached_map.is(VectorStoreTransitionDescriptor::MapRegister()));
  __ mov(VectorStoreTransitionDescriptor::MapRegister(), cached_map);

  // Pop key into place.
  __ pop(key);
  __ pop(vector);
  __ pop(receiver);
  __ pop(value);
  __ jmp(Operand::StaticVariable(virtual_register));

  __ bind(&prepare_next);
  // Advance to the next {map, transition, handler} triple.
  __ add(counter, Immediate(Smi::FromInt(3)));
  __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
  __ j(less, &next_loop);

  // We exhausted our array of map handler pairs.
  __ bind(&pop_and_miss);
  __ pop(key);
  __ pop(vector);
  __ pop(receiver);
  __ jmp(miss);

  // Smi receivers are matched via the heap-number map entry in the feedback.
  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}
4692
4693
4694void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
4695 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // edx
4696 Register key = VectorStoreICDescriptor::NameRegister(); // ecx
4697 Register value = VectorStoreICDescriptor::ValueRegister(); // eax
4698 Register vector = VectorStoreICDescriptor::VectorRegister(); // ebx
4699 Register slot = VectorStoreICDescriptor::SlotRegister(); // edi
4700 Label miss;
4701
4702 __ push(value);
4703
4704 Register scratch = value;
4705 __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
4706 FixedArray::kHeaderSize));
4707
4708 // Is it a weak cell?
4709 Label try_array;
4710 Label not_array, smi_key, key_okay;
4711 __ CompareRoot(FieldOperand(scratch, 0), Heap::kWeakCellMapRootIndex);
4712 __ j(not_equal, &try_array);
4713 HandleMonomorphicStoreCase(masm, receiver, key, vector, slot, scratch, &miss);
4714
4715 // Is it a fixed array?
4716 __ bind(&try_array);
4717 __ CompareRoot(FieldOperand(scratch, 0), Heap::kFixedArrayMapRootIndex);
4718 __ j(not_equal, &not_array);
4719 HandlePolymorphicKeyedStoreCase(masm, receiver, key, vector, slot, scratch,
4720 &miss);
4721
4722 __ bind(&not_array);
4723 Label try_poly_name;
4724 __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex);
4725 __ j(not_equal, &try_poly_name);
4726
4727 __ pop(value);
4728
4729 Handle<Code> megamorphic_stub =
4730 KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
4731 __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);
4732
4733 __ bind(&try_poly_name);
4734 // We might have a name in feedback, and a fixed array in the next slot.
4735 __ cmp(key, scratch);
4736 __ j(not_equal, &miss);
4737 // If the name comparison succeeded, we know we have a fixed array with
4738 // at least one map/handler pair.
4739 __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
4740 FixedArray::kHeaderSize + kPointerSize));
4741 HandlePolymorphicStoreCase(masm, receiver, key, vector, slot, scratch, false,
4742 &miss);
4743
4744 __ bind(&miss);
4745 __ pop(value);
4746 KeyedStoreIC::GenerateMiss(masm);
4747}
4748
4749
void CallICTrampolineStub::Generate(MacroAssembler* masm) {
  // Load the type feedback vector into ebx (where CallICStub expects it),
  // then tail-jump to the real CallIC stub code.
  __ EmitLoadTypeFeedbackVector(ebx);
  CallICStub stub(isolate(), state());
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
}
4755
4756
4757void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
4758 if (masm->isolate()->function_entry_hook() != NULL) {
4759 ProfileEntryHookStub stub(masm->isolate());
4760 masm->CallStub(&stub);
4761 }
4762}
4763
4764
// Calls the installed function-entry hook with (function address, original
// stack pointer), preserving the volatile registers eax/ecx/edx around the
// C call.
void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  // Save volatile registers.
  const int kNumSavedRegisters = 3;
  __ push(eax);
  __ push(ecx);
  __ push(edx);

  // Calculate and push the original stack pointer.
  __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
  __ push(eax);

  // Retrieve our return address and use it to calculate the calling
  // function's address.
  __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
  __ sub(eax, Immediate(Assembler::kCallInstructionLength));
  __ push(eax);

  // Call the entry hook.
  DCHECK(isolate()->function_entry_hook() != NULL);
  __ call(FUNCTION_ADDR(isolate()->function_entry_hook()),
          RelocInfo::RUNTIME_ENTRY);
  // Discard the two arguments pushed for the hook call.
  __ add(esp, Immediate(2 * kPointerSize));

  // Restore the saved volatile registers.
  __ pop(edx);
  __ pop(ecx);
  __ pop(eax);

  __ ret(0);
}
4795
4796
// Emits a dispatch on the elements kind in edx: for DISABLE_ALLOCATION_SITES
// a single stub for the initial fast kind is tail-called; for DONT_OVERRIDE a
// compare/tail-call is emitted for every kind in the fast-kind sequence.
// T is the array-constructor stub class to instantiate per kind.
template<class T>
static void CreateArrayDispatch(MacroAssembler* masm,
                                AllocationSiteOverrideMode mode) {
  if (mode == DISABLE_ALLOCATION_SITES) {
    T stub(masm->isolate(),
           GetInitialFastElementsKind(),
           mode);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmp(edx, kind);
      __ j(not_equal, &next);
      T stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}
4824
4825
// One-argument Array(len) construction: if the (single) argument is zero the
// packed ("normal") kind is kept, otherwise the array must be created holey.
// For DONT_OVERRIDE the allocation site's transition info is also bumped from
// the packed kind to its holey counterpart before dispatching.
static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
                                           AllocationSiteOverrideMode mode) {
  // ebx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
  // edx - kind (if mode != DISABLE_ALLOCATION_SITES)
  // eax - number of arguments
  // edi - constructor?
  // esp[0] - return address
  // esp[4] - last argument
  Label normal_sequence;
  if (mode == DONT_OVERRIDE) {
    // The low bit of the kind encodes holeyness (see the kind ordering).
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
    STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4);
    STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);

    // is the low bit set? If so, we are holey and that is good.
    __ test_b(edx, 1);
    __ j(not_zero, &normal_sequence);
  }

  // look at the first argument
  __ mov(ecx, Operand(esp, kPointerSize));
  __ test(ecx, ecx);
  __ j(zero, &normal_sequence);

  if (mode == DISABLE_ALLOCATION_SITES) {
    ElementsKind initial = GetInitialFastElementsKind();
    ElementsKind holey_initial = GetHoleyElementsKind(initial);

    ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
                                                  holey_initial,
                                                  DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub_holey);

    __ bind(&normal_sequence);
    ArraySingleArgumentConstructorStub stub(masm->isolate(),
                                            initial,
                                            DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    // We are going to create a holey array, but our kind is non-holey.
    // Fix kind and retry.
    __ inc(edx);

    if (FLAG_debug_code) {
      Handle<Map> allocation_site_map =
          masm->isolate()->factory()->allocation_site_map();
      __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
      __ Assert(equal, kExpectedAllocationSite);
    }

    // Save the resulting elements kind in type info. We can't just store r3
    // in the AllocationSite::transition_info field because elements kind is
    // restricted to a portion of the field...upper bits need to be left alone.
    STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
    __ add(FieldOperand(ebx, AllocationSite::kTransitionInfoOffset),
           Immediate(Smi::FromInt(kFastElementsKindPackedToHoley)));

    __ bind(&normal_sequence);
    // Dispatch on the (possibly updated) kind in edx.
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmp(edx, kind);
      __ j(not_equal, &next);
      ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}
4905
4906
4907template<class T>
4908static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
4909 int to_index = GetSequenceIndexFromFastElementsKind(
4910 TERMINAL_FAST_ELEMENTS_KIND);
4911 for (int i = 0; i <= to_index; ++i) {
4912 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4913 T stub(isolate, kind);
4914 stub.GetCode();
4915 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
4916 T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
4917 stub1.GetCode();
4918 }
4919 }
4920}
4921
4922
// Pre-generates the zero-, one- and N-argument Array constructor stubs so
// they are available without lazy compilation.
void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
  ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
      isolate);
}
4931
4932
4933void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
4934 Isolate* isolate) {
4935 ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
4936 for (int i = 0; i < 2; i++) {
4937 // For internal arrays we only need a few things
4938 InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
4939 stubh1.GetCode();
4940 InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
4941 stubh2.GetCode();
4942 InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
4943 stubh3.GetCode();
4944 }
4945}
4946
4947
// Dispatches to the right array-constructor stub based on the stub's static
// argument-count configuration; for ANY the count in eax is tested at
// runtime (0 / 1 / more-than-one).
void ArrayConstructorStub::GenerateDispatchToArrayStub(
    MacroAssembler* masm,
    AllocationSiteOverrideMode mode) {
  if (argument_count() == ANY) {
    Label not_zero_case, not_one_case;
    __ test(eax, eax);
    __ j(not_zero, &not_zero_case);
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);

    __ bind(&not_zero_case);
    __ cmp(eax, 1);
    __ j(greater, &not_one_case);
    CreateArrayDispatchOneArgument(masm, mode);

    __ bind(&not_one_case);
    CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
  } else if (argument_count() == NONE) {
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
  } else if (argument_count() == ONE) {
    CreateArrayDispatchOneArgument(masm, mode);
  } else if (argument_count() == MORE_THAN_ONE) {
    CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
  } else {
    UNREACHABLE();
  }
}
4974
4975
void ArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax    : argc (only if argument_count() is ANY or MORE_THAN_ONE)
  //  -- ebx    : AllocationSite or undefined
  //  -- edi    : constructor
  //  -- edx    : Original constructor
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------
  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    __ test(ecx, Immediate(kSmiTagMask));
    __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(ecx, MAP_TYPE, ecx);
    __ Assert(equal, kUnexpectedInitialMapForArrayFunction);

    // We should either have undefined in ebx or a valid AllocationSite
    __ AssertUndefinedOrAllocationSite(ebx);
  }

  Label subclassing;

  // Enter the context of the Array function.
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // new.target != Array function means a subclass construction; take the
  // runtime path below.
  __ cmp(edx, edi);
  __ j(not_equal, &subclassing);

  Label no_info;
  // If the feedback vector is the undefined value call an array constructor
  // that doesn't use AllocationSites.
  __ cmp(ebx, isolate()->factory()->undefined_value());
  __ j(equal, &no_info);

  // Only look at the lower 16 bits of the transition info.
  __ mov(edx, FieldOperand(ebx, AllocationSite::kTransitionInfoOffset));
  __ SmiUntag(edx);
  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
  __ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask));
  GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);

  __ bind(&no_info);
  GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);

  // Subclassing: place the constructor after the arguments and adjust argc
  // to cover function, new.target and allocation site, then call runtime.
  __ bind(&subclassing);
  switch (argument_count()) {
    case ANY:
    case MORE_THAN_ONE:
      __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
      __ add(eax, Immediate(3));
      break;
    case NONE:
      __ mov(Operand(esp, 1 * kPointerSize), edi);
      __ mov(eax, Immediate(3));
      break;
    case ONE:
      __ mov(Operand(esp, 2 * kPointerSize), edi);
      __ mov(eax, Immediate(4));
      break;
  }
  __ PopReturnAddressTo(ecx);
  __ Push(edx);
  __ Push(ebx);
  __ PushReturnAddressFrom(ecx);
  __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
}
5048
5049
// Emits the per-kind dispatch for internal arrays: zero-argument, single
// argument (switching to the holey variant when the length argument is
// non-zero and the kind is packed), and N-argument cases.
void InternalArrayConstructorStub::GenerateCase(
    MacroAssembler* masm, ElementsKind kind) {
  Label not_zero_case, not_one_case;
  Label normal_sequence;

  __ test(eax, eax);
  __ j(not_zero, &not_zero_case);
  InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
  __ TailCallStub(&stub0);

  __ bind(&not_zero_case);
  __ cmp(eax, 1);
  __ j(greater, &not_one_case);

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array
    // look at the first argument
    __ mov(ecx, Operand(esp, kPointerSize));
    __ test(ecx, ecx);
    __ j(zero, &normal_sequence);

    InternalArraySingleArgumentConstructorStub
        stub1_holey(isolate(), GetHoleyElementsKind(kind));
    __ TailCallStub(&stub1_holey);
  }

  __ bind(&normal_sequence);
  InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
  __ TailCallStub(&stub1);

  __ bind(&not_one_case);
  InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
  __ TailCallStub(&stubN);
}
5084
5085
void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax    : argc
  //  -- edi    : constructor
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    __ test(ecx, Immediate(kSmiTagMask));
    __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(ecx, MAP_TYPE, ecx);
    __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind
  __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));

  // Load the map's "bit field 2" into |result|. We only need the first byte,
  // but the following masking takes care of that anyway.
  __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ DecodeField<Map::ElementsKindBits>(ecx);

  if (FLAG_debug_code) {
    Label done;
    __ cmp(ecx, Immediate(FAST_ELEMENTS));
    __ j(equal, &done);
    __ cmp(ecx, Immediate(FAST_HOLEY_ELEMENTS));
    __ Assert(equal,
              kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  // Dispatch to the packed or holey case.
  Label fast_elements_case;
  __ cmp(ecx, Immediate(FAST_ELEMENTS));
  __ j(equal, &fast_elements_case);
  GenerateCase(masm, FAST_HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateCase(masm, FAST_ELEMENTS);
}
5134
5135
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005136void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
5137 Register context_reg = esi;
5138 Register slot_reg = ebx;
5139 Register result_reg = eax;
5140 Label slow_case;
5141
5142 // Go up context chain to the script context.
5143 for (int i = 0; i < depth(); ++i) {
5144 __ mov(result_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
5145 context_reg = result_reg;
5146 }
5147
5148 // Load the PropertyCell value at the specified slot.
5149 __ mov(result_reg, ContextOperand(context_reg, slot_reg));
5150 __ mov(result_reg, FieldOperand(result_reg, PropertyCell::kValueOffset));
5151
5152 // Check that value is not the_hole.
5153 __ CompareRoot(result_reg, Heap::kTheHoleValueRootIndex);
5154 __ j(equal, &slow_case, Label::kNear);
5155 __ Ret();
5156
5157 // Fallback to the runtime.
5158 __ bind(&slow_case);
5159 __ SmiTag(slot_reg);
5160 __ Pop(result_reg); // Pop return address.
5161 __ Push(slot_reg);
5162 __ Push(result_reg); // Push return address.
5163 __ TailCallRuntime(Runtime::kLoadGlobalViaContext);
5164}
5165
5166
// Stores a global variable into a PropertyCell in the script context.
// Fast paths cover mutable cells (with write barrier), same-value stores
// into constant-ish cells, and constant-type cells where old and new values
// are both Smis or share a map; everything else (including READ_ONLY cells)
// falls back to the runtime.
void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
  Register context_reg = esi;
  Register slot_reg = ebx;
  Register value_reg = eax;
  Register cell_reg = edi;
  Register cell_details_reg = edx;
  Register cell_value_reg = ecx;
  Label fast_heapobject_case, fast_smi_case, slow_case;

  if (FLAG_debug_code) {
    // The hole is never stored through this stub (see the debug check in
    // the same-value path below, which relies on this).
    __ CompareRoot(value_reg, Heap::kTheHoleValueRootIndex);
    __ Check(not_equal, kUnexpectedValue);
  }

  // Go up context chain to the script context.
  for (int i = 0; i < depth(); ++i) {
    __ mov(cell_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
    context_reg = cell_reg;
  }

  // Load the PropertyCell at the specified slot.
  __ mov(cell_reg, ContextOperand(context_reg, slot_reg));

  // Load PropertyDetails for the cell (actually only the cell_type and kind).
  __ mov(cell_details_reg,
         FieldOperand(cell_reg, PropertyCell::kDetailsOffset));
  __ SmiUntag(cell_details_reg);
  __ and_(cell_details_reg,
          Immediate(PropertyDetails::PropertyCellTypeField::kMask |
                    PropertyDetails::KindField::kMask |
                    PropertyDetails::kAttributesReadOnlyMask));

  // Check if PropertyCell holds mutable data.
  Label not_mutable_data;
  __ cmp(cell_details_reg,
         Immediate(PropertyDetails::PropertyCellTypeField::encode(
                       PropertyCellType::kMutable) |
                   PropertyDetails::KindField::encode(kData)));
  __ j(not_equal, &not_mutable_data);
  __ JumpIfSmi(value_reg, &fast_smi_case);
  __ bind(&fast_heapobject_case);
  __ mov(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
  __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg,
                      cell_details_reg, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  // RecordWriteField clobbers the value register, so we need to reload.
  __ mov(value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
  __ Ret();
  __ bind(&not_mutable_data);

  // Check if PropertyCell value matches the new value (relevant for Constant,
  // ConstantType and Undefined cells).
  Label not_same_value;
  __ mov(cell_value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
  __ cmp(cell_value_reg, value_reg);
  __ j(not_equal, &not_same_value,
       FLAG_debug_code ? Label::kFar : Label::kNear);
  // Make sure the PropertyCell is not marked READ_ONLY.
  __ test(cell_details_reg,
          Immediate(PropertyDetails::kAttributesReadOnlyMask));
  __ j(not_zero, &slow_case);
  if (FLAG_debug_code) {
    Label done;
    // This can only be true for Constant, ConstantType and Undefined cells,
    // because we never store the_hole via this stub.
    __ cmp(cell_details_reg,
           Immediate(PropertyDetails::PropertyCellTypeField::encode(
                         PropertyCellType::kConstant) |
                     PropertyDetails::KindField::encode(kData)));
    __ j(equal, &done);
    __ cmp(cell_details_reg,
           Immediate(PropertyDetails::PropertyCellTypeField::encode(
                         PropertyCellType::kConstantType) |
                     PropertyDetails::KindField::encode(kData)));
    __ j(equal, &done);
    __ cmp(cell_details_reg,
           Immediate(PropertyDetails::PropertyCellTypeField::encode(
                         PropertyCellType::kUndefined) |
                     PropertyDetails::KindField::encode(kData)));
    __ Check(equal, kUnexpectedValue);
    __ bind(&done);
  }
  // Storing the same value is a no-op.
  __ Ret();
  __ bind(&not_same_value);

  // Check if PropertyCell contains data with constant type (and is not
  // READ_ONLY).
  __ cmp(cell_details_reg,
         Immediate(PropertyDetails::PropertyCellTypeField::encode(
                       PropertyCellType::kConstantType) |
                   PropertyDetails::KindField::encode(kData)));
  __ j(not_equal, &slow_case, Label::kNear);

  // Now either both old and new values must be SMIs or both must be heap
  // objects with same map.
  Label value_is_heap_object;
  __ JumpIfNotSmi(value_reg, &value_is_heap_object, Label::kNear);
  __ JumpIfNotSmi(cell_value_reg, &slow_case, Label::kNear);
  // Old and new values are SMIs, no need for a write barrier here.
  __ bind(&fast_smi_case);
  __ mov(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
  __ Ret();
  __ bind(&value_is_heap_object);
  __ JumpIfSmi(cell_value_reg, &slow_case, Label::kNear);
  Register cell_value_map_reg = cell_value_reg;
  __ mov(cell_value_map_reg,
         FieldOperand(cell_value_reg, HeapObject::kMapOffset));
  __ cmp(cell_value_map_reg, FieldOperand(value_reg, HeapObject::kMapOffset));
  __ j(equal, &fast_heapobject_case);

  // Fallback to the runtime.
  __ bind(&slow_case);
  __ SmiTag(slot_reg);
  __ Pop(cell_reg);  // Pop return address.
  __ Push(slot_reg);
  __ Push(value_reg);
  __ Push(cell_reg);  // Push return address.
  __ TailCallRuntime(is_strict(language_mode())
                         ? Runtime::kStoreGlobalViaContext_Strict
                         : Runtime::kStoreGlobalViaContext_Sloppy);
}
5288
5289
5290// Generates an Operand for saving parameters after PrepareCallApiFunction.
5291static Operand ApiParameterOperand(int index) {
5292 return Operand(esp, index * kPointerSize);
5293}
5294
5295
5296// Prepares stack to put arguments (aligns and so on). Reserves
5297// space for return value if needed (assumes the return value is a handle).
5298// Arguments must be stored in ApiParameterOperand(0), ApiParameterOperand(1)
5299// etc. Saves context (esi). If space was reserved for return value then
5300// stores the pointer to the reserved slot into esi.
5301static void PrepareCallApiFunction(MacroAssembler* masm, int argc) {
5302 __ EnterApiExitFrame(argc);
5303 if (__ emit_debug_code()) {
5304 __ mov(esi, Immediate(bit_cast<int32_t>(kZapValue)));
5305 }
5306}
5307
5308
// Calls an API function. Allocates HandleScope, extracts returned value
// from handle and propagates exceptions. Clobbers ebx, edi and
// caller-save registers. Restores context. On return removes
// stack_space * kPointerSize (GCed).
//
// function_address        - address of the C callback; must be edx (DCHECKed).
// thunk_ref               - profiler thunk wrapping the callback; called
//                           instead of the callback when profiling is on.
// thunk_last_arg          - stack slot into which the real callback address
//                           is stored so the thunk can forward to it.
// stack_space             - pointer-sized slots to drop on return; used only
//                           when stack_space_operand is null.
// stack_space_operand     - if non-null, frame slot holding a byte count that
//                           is added directly to esp instead of stack_space.
// return_value_operand    - frame slot from which the result is loaded.
// context_restore_operand - if non-null, slot from which esi is restored.
static void CallApiFunctionAndReturn(MacroAssembler* masm,
                                     Register function_address,
                                     ExternalReference thunk_ref,
                                     Operand thunk_last_arg, int stack_space,
                                     Operand* stack_space_operand,
                                     Operand return_value_operand,
                                     Operand* context_restore_operand) {
  Isolate* isolate = masm->isolate();

  // Addresses of the isolate's HandleScope bookkeeping fields.
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate);
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address(isolate);
  ExternalReference level_address =
      ExternalReference::handle_scope_level_address(isolate);

  DCHECK(edx.is(function_address));
  // Allocate HandleScope in callee-save registers: ebx = next, edi = limit,
  // and bump the nesting level.
  __ mov(ebx, Operand::StaticVariable(next_address));
  __ mov(edi, Operand::StaticVariable(limit_address));
  __ add(Operand::StaticVariable(level_address), Immediate(1));

  // Log entering the external (C++) callback for the timer-event profiler.
  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1, eax);
    __ mov(Operand(esp, 0),
           Immediate(ExternalReference::isolate_address(isolate)));
    __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
                     1);
    __ PopSafepointRegisters();
  }


  // If the CPU profiler is active, route the call through the thunk so the
  // profiler can attribute ticks; otherwise call the callback directly.
  Label profiler_disabled;
  Label end_profiler_check;
  __ mov(eax, Immediate(ExternalReference::is_profiling_address(isolate)));
  __ cmpb(Operand(eax, 0), 0);
  __ j(zero, &profiler_disabled);

  // Additional parameter is the address of the actual getter function.
  __ mov(thunk_last_arg, function_address);
  // Call the api function.
  __ mov(eax, Immediate(thunk_ref));
  __ call(eax);
  __ jmp(&end_profiler_check);

  __ bind(&profiler_disabled);
  // Call the api function.
  __ call(function_address);
  __ bind(&end_profiler_check);

  // Log leaving the external callback (mirrors the entry log above).
  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1, eax);
    __ mov(Operand(esp, 0),
           Immediate(ExternalReference::isolate_address(isolate)));
    __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
                     1);
    __ PopSafepointRegisters();
  }

  Label prologue;
  // Load the value from ReturnValue
  __ mov(eax, return_value_operand);

  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  __ bind(&prologue);
  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  __ mov(Operand::StaticVariable(next_address), ebx);
  __ sub(Operand::StaticVariable(level_address), Immediate(1));
  // Debug-only underflow check: Assert consumes the flags set by the sub.
  __ Assert(above_equal, kInvalidHandleScopeLevel);
  // If the limit changed, extensions were allocated and must be deleted.
  __ cmp(edi, Operand::StaticVariable(limit_address));
  __ j(not_equal, &delete_allocated_handles);

  // Leave the API exit frame.
  __ bind(&leave_exit_frame);
  bool restore_context = context_restore_operand != NULL;
  if (restore_context) {
    __ mov(esi, *context_restore_operand);
  }
  if (stack_space_operand != nullptr) {
    // Dynamic stack adjustment: load the byte count before the frame goes.
    __ mov(ebx, *stack_space_operand);
  }
  __ LeaveApiExitFrame(!restore_context);

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate);
  __ cmp(Operand::StaticVariable(scheduled_exception_address),
         Immediate(isolate->factory()->the_hole_value()));
  __ j(not_equal, &promote_scheduled_exception);

#if DEBUG
  // Check if the function returned a valid JavaScript value.
  Label ok;
  Register return_value = eax;
  Register map = ecx;

  __ JumpIfSmi(return_value, &ok, Label::kNear);
  __ mov(map, FieldOperand(return_value, HeapObject::kMapOffset));

  __ CmpInstanceType(map, LAST_NAME_TYPE);
  __ j(below_equal, &ok, Label::kNear);

  __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
  __ j(above_equal, &ok, Label::kNear);

  __ cmp(map, isolate->factory()->heap_number_map());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->undefined_value());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->true_value());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->false_value());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->null_value());
  __ j(equal, &ok, Label::kNear);

  __ Abort(kAPICallReturnedInvalidObject);

  __ bind(&ok);
#endif

  if (stack_space_operand != nullptr) {
    // Pop the return address, drop the dynamic byte count (in ebx), and
    // jump back to the caller.
    DCHECK_EQ(0, stack_space);
    __ pop(ecx);
    __ add(esp, ebx);
    __ jmp(ecx);
  } else {
    __ ret(stack_space * kPointerSize);
  }

  // Re-throw by promoting a scheduled exception.
  __ bind(&promote_scheduled_exception);
  __ TailCallRuntime(Runtime::kPromoteScheduledException);

  // HandleScope limit has changed. Delete allocated extensions.
  ExternalReference delete_extensions =
      ExternalReference::delete_handle_scope_extensions(isolate);
  __ bind(&delete_allocated_handles);
  __ mov(Operand::StaticVariable(limit_address), edi);
  // Preserve the result (eax) in edi across the C call, then restore it.
  __ mov(edi, eax);
  __ mov(Operand(esp, 0),
         Immediate(ExternalReference::isolate_address(isolate)));
  __ mov(eax, Immediate(delete_extensions));
  __ call(eax);
  __ mov(eax, edi);
  __ jmp(&leave_exit_frame);
}
5472
5473
// Common code for CallApiFunctionStub and CallApiAccessorStub: builds the
// FunctionCallbackInfo frame on the stack and invokes the API callback via
// CallApiFunctionAndReturn.
//
// argc                - argument count; either an immediate, or a register
//                       that must be eax (DCHECKed below).
// return_first_arg    - if true, return the first JS argument (used by
//                       stores) instead of the ReturnValue slot.
// call_data_undefined - if true, call_data (ebx) holds undefined and is
//                       reused to fill the ReturnValue slots.
static void CallApiFunctionStubHelper(MacroAssembler* masm,
                                      const ParameterCount& argc,
                                      bool return_first_arg,
                                      bool call_data_undefined) {
  // ----------- S t a t e -------------
  //  -- edi                 : callee
  //  -- ebx                 : call_data
  //  -- ecx                 : holder
  //  -- edx                 : api_function_address
  //  -- esi                 : context
  //  -- eax                 : number of arguments if argc is a register
  //  --
  //  -- esp[0]              : return address
  //  -- esp[4]              : last argument
  //  -- ...
  //  -- esp[argc * 4]       : first argument
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  Register callee = edi;
  Register call_data = ebx;
  Register holder = ecx;
  Register api_function_address = edx;
  Register context = esi;
  Register return_address = eax;

  typedef FunctionCallbackArguments FCA;

  // The pushes below must produce exactly the FCA layout.
  STATIC_ASSERT(FCA::kContextSaveIndex == 6);
  STATIC_ASSERT(FCA::kCalleeIndex == 5);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kHolderIndex == 0);
  STATIC_ASSERT(FCA::kArgsLength == 7);

  DCHECK(argc.is_immediate() || eax.is(argc.reg()));

  if (argc.is_immediate()) {
    // eax is free in the immediate case; use it for the return address.
    __ pop(return_address);
    // context save.
    __ push(context);
  } else {
    // eax holds argc, so swap the return address with the context in one
    // instruction and use esi as the return-address register instead.
    __ xchg(context, Operand(esp, 0));
    return_address = context;
  }

  // callee
  __ push(callee);

  // call data
  __ push(call_data);

  Register scratch = call_data;
  if (!call_data_undefined) {
    // return value
    __ push(Immediate(masm->isolate()->factory()->undefined_value()));
    // return value default
    __ push(Immediate(masm->isolate()->factory()->undefined_value()));
  } else {
    // call_data is known to be undefined, so reuse it for both slots.
    // return value
    __ push(scratch);
    // return value default
    __ push(scratch);
  }
  // isolate
  __ push(Immediate(reinterpret_cast<int>(masm->isolate())));
  // holder
  __ push(holder);

  // scratch now points at FCA slot 0 (implicit_args base).
  __ mov(scratch, esp);

  // push return address
  __ push(return_address);

  // load context from callee
  __ mov(context, FieldOperand(callee, JSFunction::kContextOffset));

  // API function gets reference to the v8::Arguments. If CPU profiler
  // is enabled wrapper function will be called and we need to pass
  // address of the callback as additional parameter, always allocate
  // space for it.
  const int kApiArgc = 1 + 1;

  // Allocate the v8::Arguments structure in the arguments' space since
  // it's not controlled by GC.
  const int kApiStackSpace = 4;

  PrepareCallApiFunction(masm, kApiArgc + kApiStackSpace);

  // FunctionCallbackInfo::implicit_args_.
  __ mov(ApiParameterOperand(2), scratch);
  if (argc.is_immediate()) {
    // Advance scratch from the FCA base to the last (first JS) argument.
    __ add(scratch,
           Immediate((argc.immediate() + FCA::kArgsLength - 1) * kPointerSize));
    // FunctionCallbackInfo::values_.
    __ mov(ApiParameterOperand(3), scratch);
    // FunctionCallbackInfo::length_.
    __ Move(ApiParameterOperand(4), Immediate(argc.immediate()));
    // FunctionCallbackInfo::is_construct_call_.
    __ Move(ApiParameterOperand(5), Immediate(0));
  } else {
    // Same computation with a register argc.
    __ lea(scratch, Operand(scratch, argc.reg(), times_pointer_size,
                            (FCA::kArgsLength - 1) * kPointerSize));
    // FunctionCallbackInfo::values_.
    __ mov(ApiParameterOperand(3), scratch);
    // FunctionCallbackInfo::length_.
    __ mov(ApiParameterOperand(4), argc.reg());
    // FunctionCallbackInfo::is_construct_call_.
    // Doubles as the dynamic stack-space byte count popped on return.
    __ lea(argc.reg(), Operand(argc.reg(), times_pointer_size,
                               (FCA::kArgsLength + 1) * kPointerSize));
    __ mov(ApiParameterOperand(5), argc.reg());
  }

  // v8::InvocationCallback's argument.
  __ lea(scratch, ApiParameterOperand(2));
  __ mov(ApiParameterOperand(0), scratch);

  ExternalReference thunk_ref =
      ExternalReference::invoke_function_callback(masm->isolate());

  Operand context_restore_operand(ebp,
                                  (2 + FCA::kContextSaveIndex) * kPointerSize);
  // Stores return the first js argument
  int return_value_offset = 0;
  if (return_first_arg) {
    return_value_offset = 2 + FCA::kArgsLength;
  } else {
    return_value_offset = 2 + FCA::kReturnValueOffset;
  }
  Operand return_value_operand(ebp, return_value_offset * kPointerSize);
  int stack_space = 0;
  Operand is_construct_call_operand = ApiParameterOperand(5);
  Operand* stack_space_operand = &is_construct_call_operand;
  if (argc.is_immediate()) {
    // Known argc: use a static slot count instead of the dynamic operand.
    stack_space = argc.immediate() + FCA::kArgsLength + 1;
    stack_space_operand = nullptr;
  }
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
                           ApiParameterOperand(1), stack_space,
                           stack_space_operand, return_value_operand,
                           &context_restore_operand);
}
5619
5620
5621void CallApiFunctionStub::Generate(MacroAssembler* masm) {
5622 bool call_data_undefined = this->call_data_undefined();
5623 CallApiFunctionStubHelper(masm, ParameterCount(eax), false,
5624 call_data_undefined);
5625}
5626
5627
5628void CallApiAccessorStub::Generate(MacroAssembler* masm) {
5629 bool is_store = this->is_store();
5630 int argc = this->argc();
5631 bool call_data_undefined = this->call_data_undefined();
5632 CallApiFunctionStubHelper(masm, ParameterCount(argc), is_store,
5633 call_data_undefined);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005634}
5635
5636
// Invokes a named-property getter callback: passes the name's address and a
// pointer to the PropertyCallbackArguments values to the C++ getter in edx.
void CallApiGetterStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- esp[0]                  : return address
  //  -- esp[4]                  : name
  //  -- esp[8 - kArgsLength*4]  : PropertyCallbackArguments object
  //  -- ...
  //  -- edx                    : api_function_address
  // -----------------------------------
  DCHECK(edx.is(ApiGetterDescriptor::function_address()));

  // Array for v8::Arguments::values_, handler for name and pointer
  // to the values (it is considered a smi by the GC).
  const int kStackSpace = PropertyCallbackArguments::kArgsLength + 2;
  // Allocate space for an optional callback address parameter in case
  // the CPU profiler is active.
  const int kApiArgc = 2 + 1;

  Register api_function_address = edx;
  Register scratch = ebx;

  // Load the address of the name (directly above the return address).
  __ lea(scratch, Operand(esp, 1 * kPointerSize));

  PrepareCallApiFunction(masm, kApiArgc);
  __ mov(ApiParameterOperand(0), scratch);  // name.
  __ add(scratch, Immediate(kPointerSize));
  __ mov(ApiParameterOperand(1), scratch);  // arguments pointer.

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback(isolate());

  // No context restore (NULL); result comes from the frame slot at ebp+7*4
  // (the PropertyCallbackArguments ReturnValue — assumes the layout above;
  //  TODO(review): confirm against PropertyCallbackArguments indices).
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
                           ApiParameterOperand(2), kStackSpace, nullptr,
                           Operand(ebp, 7 * kPointerSize), NULL);
}
5672
5673
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005674#undef __
5675
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005676} // namespace internal
5677} // namespace v8
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005678
5679#endif // V8_TARGET_ARCH_IA32