// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X87

#include "src/code-stubs.h"
#include "src/api-arguments.h"
#include "src/base/bits.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"
#include "src/isolate.h"
#include "src/regexp/jsregexp.h"
#include "src/regexp/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"
#include "src/x87/code-stubs-x87.h"
#include "src/x87/frames-x87.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

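// A note on the stub below: judging from the tail call, eax holds the
// argument count, while edi and ebx are assumed to carry the constructor and
// the AllocationSite (or undefined) per the stub's call descriptor, which is
// not spelled out in this file. The stub pops the return address, stores edi
// in the slot just past the arguments, pushes edi, ebx and the return address
// back, adds the three extra values to the argument count and tail-calls
// Runtime::kNewArray.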
void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
  __ pop(ecx);
  __ mov(MemOperand(esp, eax, times_4, 0), edi);
  __ push(edi);
  __ push(ebx);
  __ push(ecx);
  __ add(eax, Immediate(3));
  __ TailCallRuntime(Runtime::kNewArray);
}

void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
  Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
  descriptor->Initialize(eax, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}

void FastFunctionBindStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry;
  descriptor->Initialize(eax, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}

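// GenerateLightweightMiss pushes the stub's register parameters, as described
// by its call interface descriptor, and calls the miss handler inside a fresh
// internal frame. The DCHECK below documents the assumption that eax, the
// return register, is the last register parameter (if there are any).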
void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
                                               ExternalReference miss) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
  int param_count = descriptor.GetRegisterParameterCount();
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    DCHECK(param_count == 0 ||
           eax.is(descriptor.GetRegisterParameter(param_count - 1)));
    // Push arguments
    for (int i = 0; i < param_count; ++i) {
      __ push(descriptor.GetRegisterParameter(i));
    }
    __ CallExternalReference(miss, param_count);
  }

  __ ret(0);
}


void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  __ pushad();
  if (save_doubles()) {
    // Save the FPU state in the 108-byte FNSAVE area (m108byte).
    __ sub(esp, Immediate(108));
    __ fnsave(Operand(esp, 0));
  }
  const int argument_count = 1;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count, ecx);
  __ mov(Operand(esp, 0 * kPointerSize),
         Immediate(ExternalReference::isolate_address(isolate())));
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  if (save_doubles()) {
    // Restore the FPU state from the 108-byte FNSAVE area (m108byte).
    __ frstor(Operand(esp, 0));
    __ add(esp, Immediate(108));
  }
  __ popad();
  __ ret(0);
}


class FloatingPointHelper : public AllStatic {
 public:
  enum ArgLocation {
    ARGS_ON_STACK,
    ARGS_IN_REGISTERS
  };

  // Code pattern for loading a floating point value. Input value must
  // be either a smi or a heap number object (fp value). Requirements:
  // operand in register number. Returns operand as floating point number
  // on FPU stack.
  static void LoadFloatOperand(MacroAssembler* masm, Register number);

  // Test if operands are smi or number objects (fp). Requirements:
  // operand_1 in eax, operand_2 in edx; falls through on float
  // operands, jumps to the non_float label otherwise.
  static void CheckFloatOperands(MacroAssembler* masm,
                                 Label* non_float,
                                 Register scratch);
};

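// A rough sketch of the truncating double-to-int32 conversion implemented by
// DoubleToIStub below: the input is split into its low mantissa word and its
// high word, and the biased exponent is extracted from the high word. For
// exponents at or above the mantissa width the low mantissa word only needs a
// left shift (or the result is zero when everything shifts out); for smaller
// exponents the hidden bit is re-inserted into the high mantissa bits and the
// 64-bit significand is shifted right instead. The result is finally negated
// if the sign bit of the original double was set. The bit-field constants
// (kExponentMask, kExponentBias, kSignificandMask, kHiddenBit) come from
// HeapNumber and Double.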
void DoubleToIStub::Generate(MacroAssembler* masm) {
  Register input_reg = this->source();
  Register final_result_reg = this->destination();
  DCHECK(is_truncating());

  Label check_negative, process_64_bits, done, done_no_stash;

  int double_offset = offset();

  // Account for return address and saved regs if input is esp.
  if (input_reg.is(esp)) double_offset += 3 * kPointerSize;

  MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
  MemOperand exponent_operand(MemOperand(input_reg,
                                         double_offset + kDoubleSize / 2));

  Register scratch1;
  {
    Register scratch_candidates[3] = { ebx, edx, edi };
    for (int i = 0; i < 3; i++) {
      scratch1 = scratch_candidates[i];
      if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
    }
  }
  // Since we must use ecx for shifts below, use some other register (eax)
  // to calculate the result if ecx is the requested return register.
  Register result_reg = final_result_reg.is(ecx) ? eax : final_result_reg;
  // Save ecx if it isn't the return register (and is therefore volatile), or,
  // if it is the return register, save the temporary register we use in its
  // stead for the result.
  Register save_reg = final_result_reg.is(ecx) ? eax : ecx;
  __ push(scratch1);
  __ push(save_reg);

  bool stash_exponent_copy = !input_reg.is(esp);
  __ mov(scratch1, mantissa_operand);
  __ mov(ecx, exponent_operand);
  if (stash_exponent_copy) __ push(ecx);

  __ and_(ecx, HeapNumber::kExponentMask);
  __ shr(ecx, HeapNumber::kExponentShift);
  __ lea(result_reg, MemOperand(ecx, -HeapNumber::kExponentBias));
  __ cmp(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits);

  // Result is entirely in lower 32-bits of mantissa
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  __ sub(ecx, Immediate(delta));
  __ xor_(result_reg, result_reg);
  __ cmp(ecx, Immediate(31));
  __ j(above, &done);
  __ shl_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  // Result must be extracted from shifted 32-bit mantissa
  __ sub(ecx, Immediate(delta));
  __ neg(ecx);
  if (stash_exponent_copy) {
    __ mov(result_reg, MemOperand(esp, 0));
  } else {
    __ mov(result_reg, exponent_operand);
  }
  __ and_(result_reg,
          Immediate(static_cast<uint32_t>(Double::kSignificandMask >> 32)));
  __ add(result_reg,
         Immediate(static_cast<uint32_t>(Double::kHiddenBit >> 32)));
  __ shrd_cl(scratch1, result_reg);
  __ shr_cl(result_reg);
  __ test(ecx, Immediate(32));
  {
    Label skip_mov;
    __ j(equal, &skip_mov, Label::kNear);
    __ mov(scratch1, result_reg);
    __ bind(&skip_mov);
  }

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ mov(result_reg, scratch1);
  __ neg(result_reg);
  if (stash_exponent_copy) {
    __ cmp(MemOperand(esp, 0), Immediate(0));
  } else {
    __ cmp(exponent_operand, Immediate(0));
  }
  {
    Label skip_mov;
    __ j(less_equal, &skip_mov, Label::kNear);
    __ mov(result_reg, scratch1);
    __ bind(&skip_mov);
  }

  // Restore registers
  __ bind(&done);
  if (stash_exponent_copy) {
    __ add(esp, Immediate(kDoubleSize / 2));
  }
  __ bind(&done_no_stash);
  if (!final_result_reg.is(result_reg)) {
    DCHECK(final_result_reg.is(ecx));
    __ mov(final_result_reg, result_reg);
  }
  __ pop(save_reg);
  __ pop(scratch1);
  __ ret(0);
}


void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
                                           Register number) {
  Label load_smi, done;

  __ JumpIfSmi(number, &load_smi, Label::kNear);
  __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
  __ jmp(&done, Label::kNear);

  __ bind(&load_smi);
  __ SmiUntag(number);
  __ push(number);
  __ fild_s(Operand(esp, 0));
  __ pop(number);

  __ bind(&done);
}


void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
                                             Label* non_float,
                                             Register scratch) {
  Label test_other, done;
  // Test if both operands are floats or smi -> scratch=k_is_float;
  // Otherwise scratch = k_not_float.
  __ JumpIfSmi(edx, &test_other, Label::kNear);
  __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
  Factory* factory = masm->isolate()->factory();
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // argument in edx is not a number -> NaN

  __ bind(&test_other);
  __ JumpIfSmi(eax, &done, Label::kNear);
  __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // argument in eax is not a number -> NaN

  // Fall-through: Both operands are numbers.
  __ bind(&done);
}

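// On x87 there is no fast path for Math.pow: the ON_STACK case simply
// tail-calls Runtime::kMathPowRT, and any other exponent type is
// unimplemented here.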
void MathPowStub::Generate(MacroAssembler* masm) {
  const Register base = edx;
  const Register scratch = ecx;
  Label call_runtime;

  // We will call runtime helper function directly.
  if (exponent_type() == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMathPowRT);

    // The stub is called from non-optimized code, which expects the result
    // as heap number in exponent.
    __ AllocateHeapNumber(eax, scratch, base, &call_runtime);
    __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
    __ ret(2 * kPointerSize);
  } else {
    // Currently it's only called from full-compiler and exponent type is
    // ON_STACK.
    UNIMPLEMENTED();
  }
}

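// FunctionPrototypeStub loads the "prototype" property of a function
// receiver. If the prototype slot still holds the hole, or the fast lookup in
// TryGetFunctionPrototype fails, it misses to the generic LOAD_IC builtin.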
void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  Label miss;
  Register receiver = LoadDescriptor::ReceiverRegister();
  // With careful management, we won't have to save slot and vector on
  // the stack. Simply handle the possibly missing case first.
  // TODO(mvstanton): this code can be more efficient.
  __ cmp(FieldOperand(receiver, JSFunction::kPrototypeOrInitialMapOffset),
         Immediate(isolate()->factory()->the_hole_value()));
  __ j(equal, &miss);
  __ TryGetFunctionPrototype(receiver, eax, ebx, &miss);
  __ ret(0);

  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
}


void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
  // Return address is on the stack.
  Label miss;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register index = LoadDescriptor::NameRegister();
  Register scratch = edi;
  DCHECK(!scratch.is(receiver) && !scratch.is(index));
  Register result = eax;
  DCHECK(!result.is(scratch));
  DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()) &&
         result.is(LoadDescriptor::SlotRegister()));

  // StringCharAtGenerator doesn't use the result register until it's passed
  // the different miss possibilities. If it did, we would have a conflict
  // when FLAG_vector_ics is true.

  StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          RECEIVER_IS_STRING);
  char_at_generator.GenerateFast(masm);
  __ ret(0);

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper);

  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}

void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Jump straight to the runtime if native RegExp support was not selected at
  // compile time, or if regexp entry in generated code has been turned off by
  // a runtime switch or at compilation.
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec);
#else  // V8_INTERPRETED_REGEXP

  // Stack frame on entry.
  //  esp[0]: return address
  //  esp[4]: last_match_info (expected JSArray)
  //  esp[8]: previous index
  //  esp[12]: subject string
  //  esp[16]: JSRegExp object

  static const int kLastMatchInfoOffset = 1 * kPointerSize;
  static const int kPreviousIndexOffset = 2 * kPointerSize;
  static const int kSubjectOffset = 3 * kPointerSize;
  static const int kJSRegExpOffset = 4 * kPointerSize;

  Label runtime;
  Factory* factory = isolate()->factory();

  // Ensure that a RegExp stack is allocated.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(isolate());
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(isolate());
  __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ test(ebx, ebx);
  __ j(zero, &runtime);

  // Check that the first argument is a JSRegExp object.
  __ mov(eax, Operand(esp, kJSRegExpOffset));
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfSmi(eax, &runtime);
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
  __ j(not_equal, &runtime);

  // Check that the RegExp has been compiled (data contains a fixed array).
  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    __ test(ecx, Immediate(kSmiTagMask));
    __ Check(not_zero, kUnexpectedTypeForRegExpDataFixedArrayExpected);
    __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
    __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
  }

  // ecx: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
  __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset));
  __ cmp(ebx, Immediate(Smi::FromInt(JSRegExp::IRREGEXP)));
  __ j(not_equal, &runtime);

  // ecx: RegExp data (FixedArray)
  // Check that the number of captures fit in the static offsets vector buffer.
  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
  // Check (number_of_captures + 1) * 2 <= offsets vector size
  // Or number_of_captures * 2 <= offsets vector size - 2
  // Multiplying by 2 comes for free since edx is smi-tagged.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
  __ cmp(edx, Isolate::kJSRegexpStaticOffsetsVectorSize - 2);
  __ j(above, &runtime);

  // Reset offset for possibly sliced string.
  __ Move(edi, Immediate(0));
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ JumpIfSmi(eax, &runtime);
  __ mov(edx, eax);  // Make a copy of the original subject string.

  // eax: subject string
  // edx: subject string
  // ecx: RegExp data (FixedArray)
  // Handle subject string according to its encoding and representation:
  // (1) Sequential two byte? If yes, go to (9).
  // (2) Sequential one byte? If yes, go to (5).
  // (3) Sequential or cons? If not, go to (6).
  // (4) Cons string. If the string is flat, replace subject with first string
  //     and go to (1). Otherwise bail out to runtime.
  // (5) One byte sequential. Load regexp code for one byte.
  // (E) Carry on.
  /// [...]

  // Deferred code at the end of the stub:
  // (6) Long external string? If not, go to (10).
  // (7) External string. Make it, offset-wise, look like a sequential string.
  // (8) Is the external string one byte? If yes, go to (5).
  // (9) Two byte sequential. Load regexp code for two byte. Go to (E).
  // (10) Short external string or not a string? If yes, bail out to runtime.
  // (11) Sliced string. Replace subject with parent. Go to (1).

  Label seq_one_byte_string /* 5 */, seq_two_byte_string /* 9 */,
      external_string /* 7 */, check_underlying /* 1 */,
      not_seq_nor_cons /* 6 */, check_code /* E */, not_long_external /* 10 */;

  __ bind(&check_underlying);
  // (1) Sequential two byte? If yes, go to (9).
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));

  __ and_(ebx, kIsNotStringMask |
               kStringRepresentationMask |
               kStringEncodingMask |
               kShortExternalStringMask);
  STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);  // Go to (9).

  // (2) Sequential one byte? If yes, go to (5).
  // Any other sequential string must be one byte.
  __ and_(ebx, Immediate(kIsNotStringMask |
                         kStringRepresentationMask |
                         kShortExternalStringMask));
  __ j(zero, &seq_one_byte_string, Label::kNear);  // Go to (5).

  // (3) Sequential or cons? If not, go to (6).
  // We check whether the subject string is a cons, since sequential strings
  // have already been covered.
  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
  STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
  STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
  __ cmp(ebx, Immediate(kExternalStringTag));
  __ j(greater_equal, &not_seq_nor_cons);  // Go to (6).

  // (4) Cons string. Check that it's flat.
  // Replace subject with first string and reload instance type.
  __ cmp(FieldOperand(eax, ConsString::kSecondOffset), factory->empty_string());
  __ j(not_equal, &runtime);
  __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
  __ jmp(&check_underlying);

  // eax: sequential subject string (or look-alike, external string)
  // edx: original subject string
  // ecx: RegExp data (FixedArray)
  // (5) One byte sequential. Load regexp code for one byte.
  __ bind(&seq_one_byte_string);
  // Load previous index and check range before edx is overwritten. We have
  // to use edx instead of eax here because it might have been only made to
  // look like a sequential string when it actually is an external string.
  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
  __ JumpIfNotSmi(ebx, &runtime);
  __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
  __ j(above_equal, &runtime);
  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataOneByteCodeOffset));
  __ Move(ecx, Immediate(1));  // Type is one byte.

  // (E) Carry on. String handling is done.
  __ bind(&check_code);
  // edx: irregexp code
  // Check that the irregexp code has been generated for the actual string
  // encoding. If it has, the field contains a code object; otherwise it
  // contains a smi (code flushing support).
  __ JumpIfSmi(edx, &runtime);

  // eax: subject string
  // ebx: previous index (smi)
  // edx: code
  // ecx: encoding of subject string (1 if one_byte, 0 if two_byte);
  // All checks done. Now push arguments for native regexp code.
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->regexp_entry_native(), 1);

  // Isolates: note we add an additional parameter here (isolate pointer).
  static const int kRegExpExecuteArguments = 9;
  __ EnterApiExitFrame(kRegExpExecuteArguments);

  // Argument 9: Pass current isolate address.
  __ mov(Operand(esp, 8 * kPointerSize),
         Immediate(ExternalReference::isolate_address(isolate())));

  // Argument 8: Indicate that this is a direct call from JavaScript.
  __ mov(Operand(esp, 7 * kPointerSize), Immediate(1));

  // Argument 7: Start (high end) of backtracking stack memory area.
  __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address));
  __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ mov(Operand(esp, 6 * kPointerSize), esi);

  // Argument 6: Set the number of capture registers to zero to force global
  // regexps to behave as non-global. This does not affect non-global regexps.
  __ mov(Operand(esp, 5 * kPointerSize), Immediate(0));

  // Argument 5: static offsets vector buffer.
  __ mov(Operand(esp, 4 * kPointerSize),
         Immediate(ExternalReference::address_of_static_offsets_vector(
             isolate())));

  // Argument 2: Previous index.
  __ SmiUntag(ebx);
  __ mov(Operand(esp, 1 * kPointerSize), ebx);

  // Argument 1: Original subject string.
  // The original subject is in the previous stack frame. Therefore we have to
  // use ebp, which points exactly to one pointer size below the previous esp.
  // (Because creating a new stack frame pushes the previous ebp onto the stack
  // and thereby moves up esp by one kPointerSize.)
  __ mov(esi, Operand(ebp, kSubjectOffset + kPointerSize));
  __ mov(Operand(esp, 0 * kPointerSize), esi);

  // esi: original subject string
  // eax: underlying subject string
  // ebx: previous index
  // ecx: encoding of subject string (1 if one_byte 0 if two_byte);
  // edx: code
  // Argument 4: End of string data
  // Argument 3: Start of string data
  // Prepare start and end index of the input.
  // Load the length from the original sliced string if that is the case.
  __ mov(esi, FieldOperand(esi, String::kLengthOffset));
  __ add(esi, edi);  // Calculate input end wrt offset.
  __ SmiUntag(edi);
  __ add(ebx, edi);  // Calculate input start wrt offset.

  // ebx: start index of the input string
  // esi: end index of the input string
  Label setup_two_byte, setup_rest;
  __ test(ecx, ecx);
  __ j(zero, &setup_two_byte, Label::kNear);
  __ SmiUntag(esi);
  __ lea(ecx, FieldOperand(eax, esi, times_1, SeqOneByteString::kHeaderSize));
  __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
  __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqOneByteString::kHeaderSize));
  __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.
  __ jmp(&setup_rest, Label::kNear);

  __ bind(&setup_two_byte);
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);  // esi is smi (powered by 2).
  __ lea(ecx, FieldOperand(eax, esi, times_1, SeqTwoByteString::kHeaderSize));
  __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
  __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
  __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.

  __ bind(&setup_rest);

  // Locate the code entry and call it.
  __ add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(edx);

  // Drop arguments and come back to JS mode.
  __ LeaveApiExitFrame(true);

  // Check the result.
  Label success;
  __ cmp(eax, 1);
  // We expect exactly one result since we force the called regexp to behave
  // as non-global.
  __ j(equal, &success);
  Label failure;
  __ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
  __ j(equal, &failure);
  __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
  // If not exception it can only be retry. Handle that in the runtime system.
  __ j(not_equal, &runtime);
  // Result must now be exception. If there is no pending exception already, a
  // stack overflow (on the backtrack stack) was detected in RegExp code, but
  // the exception has not been created yet. Handle that in the runtime system.
  // TODO(592): Rerunning the RegExp to get the stack overflow exception.
  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
                                      isolate());
  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
  __ mov(eax, Operand::StaticVariable(pending_exception));
  __ cmp(edx, eax);
  __ j(equal, &runtime);

  // For exception, throw the exception again.
  __ TailCallRuntime(Runtime::kRegExpExecReThrow);

  __ bind(&failure);
  // For failure to match, return null.
  __ mov(eax, factory->null_value());
  __ ret(4 * kPointerSize);

  // Load RegExp data.
  __ bind(&success);
  __ mov(eax, Operand(esp, kJSRegExpOffset));
  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  __ add(edx, Immediate(2));  // edx was a smi.

  // edx: Number of capture registers
  // Load last_match_info which is still known to be a fast case JSArray.
  // Check that the fourth object is a JSArray object.
  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
  __ JumpIfSmi(eax, &runtime);
  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
  __ j(not_equal, &runtime);
  // Check that the JSArray is in fast case.
  __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
  __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
  __ cmp(eax, factory->fixed_array_map());
  __ j(not_equal, &runtime);
  // Check that the last match info has space for the capture registers and the
  // additional information.
  __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
  __ SmiUntag(eax);
  __ sub(eax, Immediate(RegExpImpl::kLastMatchOverhead));
  __ cmp(edx, eax);
  __ j(greater, &runtime);

  // ebx: last_match_info backing store (FixedArray)
  // edx: number of capture registers
  // Store the capture count.
  __ SmiTag(edx);  // Number of capture registers to smi.
  __ mov(FieldOperand(ebx, RegExpImpl::kLastCaptureCountOffset), edx);
  __ SmiUntag(edx);  // Number of capture registers back from smi.
  // Store last subject and last input.
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ mov(ecx, eax);
  __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax);
  __ RecordWriteField(ebx, RegExpImpl::kLastSubjectOffset, eax, edi,
                      kDontSaveFPRegs);
  __ mov(eax, ecx);
  __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax);
  __ RecordWriteField(ebx, RegExpImpl::kLastInputOffset, eax, edi,
                      kDontSaveFPRegs);

  // Get the static offsets vector filled by the native regexp code.
  ExternalReference address_of_static_offsets_vector =
      ExternalReference::address_of_static_offsets_vector(isolate());
  __ mov(ecx, Immediate(address_of_static_offsets_vector));

  // ebx: last_match_info backing store (FixedArray)
  // ecx: offsets vector
  // edx: number of capture registers
  Label next_capture, done;
  // Capture register counter starts from number of capture registers and
  // counts down until wrapping after zero.
  __ bind(&next_capture);
  __ sub(edx, Immediate(1));
  __ j(negative, &done, Label::kNear);
  // Read the value from the static offsets vector buffer.
  __ mov(edi, Operand(ecx, edx, times_int_size, 0));
  __ SmiTag(edi);
  // Store the smi value in the last match info.
  __ mov(FieldOperand(ebx,
                      edx,
                      times_pointer_size,
                      RegExpImpl::kFirstCaptureOffset),
         edi);
  __ jmp(&next_capture);
  __ bind(&done);

  // Return last match info.
  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
  __ ret(4 * kPointerSize);

  // Do the runtime call to execute the regexp.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kRegExpExec);

  // Deferred code for string handling.
  // (6) Long external string? If not, go to (10).
  __ bind(&not_seq_nor_cons);
  // Compare flags are still set from (3).
  __ j(greater, &not_long_external, Label::kNear);  // Go to (10).

  // (7) External string. Short external strings have been ruled out.
  __ bind(&external_string);
  // Reload instance type.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ test_b(ebx, Immediate(kIsIndirectStringMask));
    __ Assert(zero, kExternalStringExpectedButNotFound);
  }
  __ mov(eax, FieldOperand(eax, ExternalString::kResourceDataOffset));
  // Move the pointer so that offset-wise, it looks like a sequential string.
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ sub(eax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  STATIC_ASSERT(kTwoByteStringTag == 0);
  // (8) Is the external string one byte? If yes, go to (5).
  __ test_b(ebx, Immediate(kStringEncodingMask));
  __ j(not_zero, &seq_one_byte_string);  // Go to (5).

  // eax: sequential subject string (or look-alike, external string)
  // edx: original subject string
  // ecx: RegExp data (FixedArray)
  // (9) Two byte sequential. Load regexp code for two byte. Go to (E).
  __ bind(&seq_two_byte_string);
  // Load previous index and check range before edx is overwritten. We have
  // to use edx instead of eax here because it might have been only made to
  // look like a sequential string when it actually is an external string.
  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
  __ JumpIfNotSmi(ebx, &runtime);
  __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
  __ j(above_equal, &runtime);
  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
  __ Move(ecx, Immediate(0));  // Type is two byte.
  __ jmp(&check_code);  // Go to (E).

  // (10) Not a string or a short external string? If yes, bail out to runtime.
  __ bind(&not_long_external);
  // Catch non-string subject or short external string.
  STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
  __ test(ebx, Immediate(kIsNotStringMask | kShortExternalStringTag));
  __ j(not_zero, &runtime);

  // (11) Sliced string. Replace subject with parent. Go to (1).
  // Load offset into edi and replace subject string with parent.
  __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset));
  __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset));
  __ jmp(&check_underlying);  // Go to (1).
#endif  // V8_INTERPRETED_REGEXP
}


static int NegativeComparisonResult(Condition cc) {
  DCHECK(cc != equal);
  DCHECK((cc == less) || (cc == less_equal)
      || (cc == greater) || (cc == greater_equal));
  return (cc == greater || cc == greater_equal) ? LESS : GREATER;
}


static void CheckInputType(MacroAssembler* masm, Register input,
                           CompareICState::State expected, Label* fail) {
  Label ok;
  if (expected == CompareICState::SMI) {
    __ JumpIfNotSmi(input, fail);
  } else if (expected == CompareICState::NUMBER) {
    __ JumpIfSmi(input, &ok);
    __ cmp(FieldOperand(input, HeapObject::kMapOffset),
           Immediate(masm->isolate()->factory()->heap_number_map()));
    __ j(not_equal, fail);
  }
  // We could be strict about internalized/non-internalized here, but as long as
  // hydrogen doesn't care, the stub doesn't have to care either.
  __ bind(&ok);
}


static void BranchIfNotInternalizedString(MacroAssembler* masm,
                                          Label* label,
                                          Register object,
                                          Register scratch) {
  __ JumpIfSmi(object, label);
  __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ test(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, label);
}

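// CompareICStub::GenerateGeneric below roughly proceeds as follows: a smi
// fast path, special handling of identical operands (undefined, NaN, symbols
// and SIMD values), a fast strict-equality path for objects and oddballs, an
// x87 floating-point comparison, fast paths for internalized strings and flat
// one-byte strings, and finally a call into the runtime for everything else.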
void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
  Label runtime_call, check_unequal_objects;
  Condition cc = GetCondition();

  Label miss;
  CheckInputType(masm, edx, left(), &miss);
  CheckInputType(masm, eax, right(), &miss);

  // Compare two smis.
  Label non_smi, smi_done;
  __ mov(ecx, edx);
  __ or_(ecx, eax);
  __ JumpIfNotSmi(ecx, &non_smi, Label::kNear);
  __ sub(edx, eax);  // Return on the result of the subtraction.
  __ j(no_overflow, &smi_done, Label::kNear);
  __ not_(edx);  // Correct sign in case of overflow. edx is never 0 here.
  __ bind(&smi_done);
  __ mov(eax, edx);
  __ ret(0);
  __ bind(&non_smi);

  // NOTICE! This code is only reached after a smi-fast-case check, so
  // it is certain that at least one operand isn't a smi.

  // Identical objects can be compared fast, but there are some tricky cases
  // for NaN and undefined.
  Label generic_heap_number_comparison;
  {
    Label not_identical;
    __ cmp(eax, edx);
    __ j(not_equal, &not_identical);

    if (cc != equal) {
      // Check for undefined. undefined OP undefined is false even though
      // undefined == undefined.
      __ cmp(edx, isolate()->factory()->undefined_value());
      Label check_for_nan;
      __ j(not_equal, &check_for_nan, Label::kNear);
      __ Move(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
      __ ret(0);
      __ bind(&check_for_nan);
    }

    // Test for NaN. Compare heap numbers in a general way,
    // to handle NaNs correctly.
    __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->heap_number_map()));
    __ j(equal, &generic_heap_number_comparison, Label::kNear);
    if (cc != equal) {
      __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
      __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
      // Call runtime on identical JSObjects. Otherwise return equal.
      __ cmpb(ecx, Immediate(FIRST_JS_RECEIVER_TYPE));
      __ j(above_equal, &runtime_call, Label::kFar);
      // Call runtime on identical symbols since we need to throw a TypeError.
      __ cmpb(ecx, Immediate(SYMBOL_TYPE));
      __ j(equal, &runtime_call, Label::kFar);
      // Call runtime on identical SIMD values since we must throw a TypeError.
      __ cmpb(ecx, Immediate(SIMD128_VALUE_TYPE));
      __ j(equal, &runtime_call, Label::kFar);
    }
    __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
    __ ret(0);


    __ bind(&not_identical);
  }

  // Strict equality can quickly decide whether objects are equal.
  // Non-strict object equality is slower, so it is handled later in the stub.
  if (cc == equal && strict()) {
    Label slow;  // Fallthrough label.
    Label not_smis;
    // If we're doing a strict equality comparison, we don't have to do
    // type conversion, so we generate code to do fast comparison for objects
    // and oddballs. Non-smi numbers and strings still go through the usual
    // slow-case code.
    // If either is a Smi (we know that not both are), then they can only
    // be equal if the other is a HeapNumber. If so, use the slow case.
    STATIC_ASSERT(kSmiTag == 0);
    DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
    __ mov(ecx, Immediate(kSmiTagMask));
    __ and_(ecx, eax);
    __ test(ecx, edx);
    __ j(not_zero, &not_smis, Label::kNear);
    // One operand is a smi.

    // Check whether the non-smi is a heap number.
    STATIC_ASSERT(kSmiTagMask == 1);
    // ecx still holds eax & kSmiTag, which is either zero or one.
    __ sub(ecx, Immediate(0x01));
    __ mov(ebx, edx);
    __ xor_(ebx, eax);
    __ and_(ebx, ecx);  // ebx holds either 0 or eax ^ edx.
    __ xor_(ebx, eax);
    // if eax was smi, ebx is now edx, else eax.

    // Check if the non-smi operand is a heap number.
    __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->heap_number_map()));
    // If heap number, handle it in the slow case.
    __ j(equal, &slow, Label::kNear);
    // Return non-equal (ebx is not zero)
    __ mov(eax, ebx);
    __ ret(0);

    __ bind(&not_smis);
    // If either operand is a JSObject or an oddball value, then they are not
    // equal since their pointers are different
    // There is no test for undetectability in strict equality.

    // Get the type of the first operand.
    // If the first object is a JS object, we have done pointer comparison.
    Label first_non_object;
    STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
    __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
    __ j(below, &first_non_object, Label::kNear);

    // Return non-zero (eax is not zero)
    Label return_not_equal;
    STATIC_ASSERT(kHeapObjectTag != 0);
    __ bind(&return_not_equal);
    __ ret(0);

    __ bind(&first_non_object);
    // Check for oddballs: true, false, null, undefined.
    __ CmpInstanceType(ecx, ODDBALL_TYPE);
    __ j(equal, &return_not_equal);

    __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ecx);
    __ j(above_equal, &return_not_equal);

    // Check for oddballs: true, false, null, undefined.
    __ CmpInstanceType(ecx, ODDBALL_TYPE);
    __ j(equal, &return_not_equal);

    // Fall through to the general case.
    __ bind(&slow);
  }

  // Generate the number comparison code.
  Label non_number_comparison;
  Label unordered;
  __ bind(&generic_heap_number_comparison);
  FloatingPointHelper::CheckFloatOperands(
      masm, &non_number_comparison, ebx);
  FloatingPointHelper::LoadFloatOperand(masm, eax);
  FloatingPointHelper::LoadFloatOperand(masm, edx);
  __ FCmp();

  // Don't base result on EFLAGS when a NaN is involved.
  __ j(parity_even, &unordered, Label::kNear);

  Label below_label, above_label;
  // Return a result of -1, 0, or 1, based on EFLAGS.
  __ j(below, &below_label, Label::kNear);
  __ j(above, &above_label, Label::kNear);

  __ Move(eax, Immediate(0));
  __ ret(0);

  __ bind(&below_label);
  __ mov(eax, Immediate(Smi::FromInt(-1)));
  __ ret(0);

  __ bind(&above_label);
  __ mov(eax, Immediate(Smi::FromInt(1)));
  __ ret(0);

  // If one of the numbers was NaN, then the result is always false.
  // The cc is never not-equal.
  __ bind(&unordered);
  DCHECK(cc != not_equal);
  if (cc == less || cc == less_equal) {
    __ mov(eax, Immediate(Smi::FromInt(1)));
  } else {
    __ mov(eax, Immediate(Smi::FromInt(-1)));
  }
  __ ret(0);

  // The number comparison code did not provide a valid result.
  __ bind(&non_number_comparison);

  // Fast negative check for internalized-to-internalized equality.
  Label check_for_strings;
  if (cc == equal) {
    BranchIfNotInternalizedString(masm, &check_for_strings, eax, ecx);
    BranchIfNotInternalizedString(masm, &check_for_strings, edx, ecx);

    // We've already checked for object identity, so if both operands
    // are internalized they aren't equal. Register eax already holds a
    // non-zero value, which indicates not equal, so just return.
    __ ret(0);
  }

  __ bind(&check_for_strings);

  __ JumpIfNotBothSequentialOneByteStrings(edx, eax, ecx, ebx,
                                           &check_unequal_objects);

  // Inline comparison of one-byte strings.
  if (cc == equal) {
    StringHelper::GenerateFlatOneByteStringEquals(masm, edx, eax, ecx, ebx);
  } else {
    StringHelper::GenerateCompareFlatOneByteStrings(masm, edx, eax, ecx, ebx,
                                                    edi);
  }
#ifdef DEBUG
  __ Abort(kUnexpectedFallThroughFromStringComparison);
#endif

  __ bind(&check_unequal_objects);
  if (cc == equal && !strict()) {
    // Non-strict equality. Objects are unequal if
    // they are both JSObjects and not undetectable,
    // and their pointers are different.
    Label return_equal, return_unequal, undetectable;
    // At most one is a smi, so we can test for smi by adding the two.
    // A smi plus a heap object has the low bit set, a heap object plus
    // a heap object has the low bit clear.
    STATIC_ASSERT(kSmiTag == 0);
    STATIC_ASSERT(kSmiTagMask == 1);
    __ lea(ecx, Operand(eax, edx, times_1, 0));
    __ test(ecx, Immediate(kSmiTagMask));
    __ j(not_zero, &runtime_call);

    __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
    __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));

    __ test_b(FieldOperand(ebx, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    __ j(not_zero, &undetectable, Label::kNear);
    __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    __ j(not_zero, &return_unequal, Label::kNear);

    __ CmpInstanceType(ebx, FIRST_JS_RECEIVER_TYPE);
    __ j(below, &runtime_call, Label::kNear);
    __ CmpInstanceType(ecx, FIRST_JS_RECEIVER_TYPE);
    __ j(below, &runtime_call, Label::kNear);

    __ bind(&return_unequal);
    // Return non-equal by returning the non-zero object pointer in eax.
    __ ret(0);  // eax, edx were pushed

    __ bind(&undetectable);
    __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    __ j(zero, &return_unequal, Label::kNear);

    // If both sides are JSReceivers, then the result is false according to
    // the HTML specification, which says that only comparisons with null or
    // undefined are affected by special casing for document.all.
    __ CmpInstanceType(ebx, ODDBALL_TYPE);
    __ j(zero, &return_equal, Label::kNear);
    __ CmpInstanceType(ecx, ODDBALL_TYPE);
    __ j(not_zero, &return_unequal, Label::kNear);

    __ bind(&return_equal);
    __ Move(eax, Immediate(EQUAL));
    __ ret(0);  // eax, edx were pushed
  }
  __ bind(&runtime_call);

  if (cc == equal) {
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(edx);
      __ Push(eax);
      __ CallRuntime(strict() ? Runtime::kStrictEqual : Runtime::kEqual);
    }
    // Turn true into 0 and false into some non-zero value.
    STATIC_ASSERT(EQUAL == 0);
    __ sub(eax, Immediate(isolate()->factory()->true_value()));
    __ Ret();
  } else {
    // Push arguments below the return address.
    __ pop(ecx);
    __ push(edx);
    __ push(eax);
    __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc))));

    // Restore return address on the stack.
    __ push(ecx);
    // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
    // tagged as a small integer.
    __ TailCallRuntime(Runtime::kCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}

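// The helper below preserves the argument count (smi-tagged), the function,
// the slot and the feedback vector across a stub call, since the called stub
// may clobber any of these registers.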
static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
  // eax : number of arguments to the construct function
  // ebx : feedback vector
  // edx : slot in feedback vector (Smi)
  // edi : the function to call

  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Number-of-arguments register must be smi-tagged to call out.
    __ SmiTag(eax);
    __ push(eax);
    __ push(edi);
    __ push(edx);
    __ push(ebx);

    __ CallStub(stub);

    __ pop(ebx);
    __ pop(edx);
    __ pop(edi);
    __ pop(eax);
    __ SmiUntag(eax);
  }
}

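// Feedback slot layout assumed by the code below: the slot at index edx holds
// the cache state (a WeakCell wrapping the target function, an AllocationSite
// for the Array constructor, or the uninitialized/megamorphic sentinel
// symbols), and the following slot holds the smi call count.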
static void GenerateRecordCallTarget(MacroAssembler* masm) {
  // Cache the called function in a feedback vector slot. Cache states
  // are uninitialized, monomorphic (indicated by a JSFunction), and
  // megamorphic.
  // eax : number of arguments to the construct function
  // ebx : feedback vector
  // edx : slot in feedback vector (Smi)
  // edi : the function to call
  Isolate* isolate = masm->isolate();
  Label initialize, done, miss, megamorphic, not_array_function;
  Label done_increment_count, done_initialize_count;

  // Load the cache state into ecx.
  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
                           FixedArray::kHeaderSize));

  // A monomorphic cache hit or an already megamorphic state: invoke the
  // function without changing the state.
  // We don't know if ecx is a WeakCell or a Symbol, but it's harmless to read
  // at this position in a symbol (see static asserts in
  // type-feedback-vector.h).
  Label check_allocation_site;
  __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset));
  __ j(equal, &done_increment_count, Label::kFar);
  __ CompareRoot(ecx, Heap::kmegamorphic_symbolRootIndex);
  __ j(equal, &done, Label::kFar);
  __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
                 Heap::kWeakCellMapRootIndex);
  __ j(not_equal, &check_allocation_site);

  // If the weak cell is cleared, we have a new chance to become monomorphic.
  __ JumpIfSmi(FieldOperand(ecx, WeakCell::kValueOffset), &initialize);
  __ jmp(&megamorphic);

  __ bind(&check_allocation_site);
  // If we came here, we need to see if we are the array function.
  // If we didn't have a matching function, and we didn't find the megamorph
  // sentinel, then we have in the slot either some other function or an
  // AllocationSite.
  __ CompareRoot(FieldOperand(ecx, 0), Heap::kAllocationSiteMapRootIndex);
  __ j(not_equal, &miss);

  // Make sure the function is the Array() function
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
  __ cmp(edi, ecx);
  __ j(not_equal, &megamorphic);
  __ jmp(&done_increment_count, Label::kFar);

  __ bind(&miss);

  // A monomorphic miss (i.e, here the cache is not uninitialized) goes
  // megamorphic.
  __ CompareRoot(ecx, Heap::kuninitialized_symbolRootIndex);
  __ j(equal, &initialize);
  // MegamorphicSentinel is an immortal immovable object (undefined) so no
  // write-barrier is needed.
  __ bind(&megamorphic);
  __ mov(
      FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize),
      Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
  __ jmp(&done, Label::kFar);

  // An uninitialized cache is patched with the function or sentinel to
  // indicate the ElementsKind if function is the Array constructor.
  __ bind(&initialize);
  // Make sure the function is the Array() function
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
  __ cmp(edi, ecx);
  __ j(not_equal, &not_array_function);

  // The target function is the Array constructor,
  // Create an AllocationSite if we don't already have it, store it in the
  // slot.
  CreateAllocationSiteStub create_stub(isolate);
  CallStubInRecordCallTarget(masm, &create_stub);
  __ jmp(&done_initialize_count);

  __ bind(&not_array_function);
  CreateWeakCellStub weak_cell_stub(isolate);
  CallStubInRecordCallTarget(masm, &weak_cell_stub);
  __ bind(&done_initialize_count);

  // Initialize the call counter.
  __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
                      FixedArray::kHeaderSize + kPointerSize),
         Immediate(Smi::FromInt(1)));
  __ jmp(&done);

  __ bind(&done_increment_count);
  // Increment the call count for monomorphic function calls.
  __ add(FieldOperand(ebx, edx, times_half_pointer_size,
                      FixedArray::kHeaderSize + kPointerSize),
         Immediate(Smi::FromInt(1)));

  __ bind(&done);
}

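// CallConstructStub checks that the callee is a JSFunction (anything else
// goes straight to the Construct builtin), records the call target in the
// feedback vector, loads the AllocationSite (or undefined) into ebx, passes
// the function itself as new target and tail-calls the function-specific
// construct stub.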
void CallConstructStub::Generate(MacroAssembler* masm) {
  // eax : number of arguments
  // ebx : feedback vector
  // edx : slot in feedback vector (Smi, for RecordCallTarget)
  // edi : constructor function

  Label non_function;
  // Check that function is not a smi.
  __ JumpIfSmi(edi, &non_function);
  // Check that function is a JSFunction.
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
  __ j(not_equal, &non_function);

  GenerateRecordCallTarget(masm);

  Label feedback_register_initialized;
  // Put the AllocationSite from the feedback vector into ebx, or undefined.
  __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
                           FixedArray::kHeaderSize));
  Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
  __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
  __ j(equal, &feedback_register_initialized);
  __ mov(ebx, isolate()->factory()->undefined_value());
  __ bind(&feedback_register_initialized);

  __ AssertUndefinedOrAllocationSite(ebx);

  // Pass new target to construct stub.
  __ mov(edx, edi);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset));
  __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
  __ jmp(ecx);

  __ bind(&non_function);
  __ mov(edx, edi);
  __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}

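// HandleArrayCase: when the CallIC's target is the Array function, the call
// is routed through ArrayConstructorStub so that the AllocationSite stored in
// the feedback slot can be used; the call count in the following slot is
// incremented first.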
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001262void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001263 // edi - function
1264 // edx - slot id
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001265 // ebx - vector
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001266 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
1267 __ cmp(edi, ecx);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001268 __ j(not_equal, miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001269
1270 __ mov(eax, arg_count());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001271  // Reload ecx with the feedback slot contents (the AllocationSite).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001272 __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
1273 FixedArray::kHeaderSize));
1274
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001275 // Increment the call count for monomorphic function calls.
1276 __ add(FieldOperand(ebx, edx, times_half_pointer_size,
1277 FixedArray::kHeaderSize + kPointerSize),
Ben Murdoch61f157c2016-09-16 13:49:30 +01001278 Immediate(Smi::FromInt(1)));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001279
1280 __ mov(ebx, ecx);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001281 __ mov(edx, edi);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001282 ArrayConstructorStub stub(masm->isolate(), arg_count());
1283 __ TailCallStub(&stub);
1284
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001285 // Unreachable.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001286}
1287
1288
1289void CallICStub::Generate(MacroAssembler* masm) {
1290 // edi - function
1291 // edx - slot id
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001292 // ebx - vector
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001293 Isolate* isolate = masm->isolate();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001294 Label extra_checks_or_miss, call, call_function;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001295 int argc = arg_count();
1296 ParameterCount actual(argc);
1297
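  // Sketch of the feedback slot states handled below (assumed semantics):
  //   WeakCell(JSFunction)  -> monomorphic: bump the call count, then jump to
  //                            the CallFunction builtin.
  //   MegamorphicSentinel   -> generic: jump to the Call builtin.
  //   AllocationSite        -> Array() feedback: delegate to HandleArrayCase.
  //   UninitializedSentinel -> go monomorphic if edi is a plain JSFunction in
  //                            the same native context (and not Array()).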
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001298 // The checks. First, does edi match the recorded monomorphic target?
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001299 __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
1300 FixedArray::kHeaderSize));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001301
1302 // We don't know that we have a weak cell. We might have a private symbol
1303 // or an AllocationSite, but the memory is safe to examine.
1304 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
1305 // FixedArray.
1306 // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
1307 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
1308 // computed, meaning that it can't appear to be a pointer. If the low bit is
1309 // 0, then hash is computed, but the 0 bit prevents the field from appearing
1310 // to be a pointer.
1311 STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
1312 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
1313 WeakCell::kValueOffset &&
1314 WeakCell::kValueOffset == Symbol::kHashFieldSlot);
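  // Layout sketch (illustrative, relying on the comment and asserts above): all
  // three candidate objects place a word at the same offset,
  //   WeakCell::kValueOffset                 -> JSFunction or Smi(0)
  //   AllocationSite::kTransitionInfoOffset  -> Smi or FixedArray pointer
  //   Symbol::kHashFieldSlot                 -> hash word that can never look
  //                                             like a tagged JSFunction pointer
  // so the single compare against edi below is safe whichever object is in the
  // slot.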
1315
1316 __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset));
1317 __ j(not_equal, &extra_checks_or_miss);
1318
1319  // The compare above could also have matched two Smis (a cleared WeakCell
1320  // holds Smi(0)). Guard against mistaking that for a monomorphic JSFunction.
1321 __ JumpIfSmi(edi, &extra_checks_or_miss);
1322
1323 // Increment the call count for monomorphic function calls.
1324 __ add(FieldOperand(ebx, edx, times_half_pointer_size,
1325 FixedArray::kHeaderSize + kPointerSize),
Ben Murdoch61f157c2016-09-16 13:49:30 +01001326 Immediate(Smi::FromInt(1)));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001327
1328 __ bind(&call_function);
1329 __ Set(eax, argc);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001330 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
1331 tail_call_mode()),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001332 RelocInfo::CODE_TARGET);
1333
1334 __ bind(&extra_checks_or_miss);
1335 Label uninitialized, miss, not_allocation_site;
1336
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001337 __ cmp(ecx, Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001338 __ j(equal, &call);
1339
1340 // Check if we have an allocation site.
1341 __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
1342 Heap::kAllocationSiteMapRootIndex);
1343 __ j(not_equal, &not_allocation_site);
1344
1345 // We have an allocation site.
1346 HandleArrayCase(masm, &miss);
1347
1348 __ bind(&not_allocation_site);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001349
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001350  // The following code attempts to handle MISS cases without going to the
1351  // runtime.
1352 if (FLAG_trace_ic) {
1353 __ jmp(&miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001354 }
1355
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001356 __ cmp(ecx, Immediate(TypeFeedbackVector::UninitializedSentinel(isolate)));
1357 __ j(equal, &uninitialized);
1358
1359 // We are going megamorphic. If the feedback is a JSFunction, it is fine
1360 // to handle it here. More complex cases are dealt with in the runtime.
1361 __ AssertNotSmi(ecx);
1362 __ CmpObjectType(ecx, JS_FUNCTION_TYPE, ecx);
1363 __ j(not_equal, &miss);
1364 __ mov(
1365 FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize),
1366 Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001367
1368 __ bind(&call);
1369 __ Set(eax, argc);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001370 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001371 RelocInfo::CODE_TARGET);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001372
1373 __ bind(&uninitialized);
1374
1375 // We are going monomorphic, provided we actually have a JSFunction.
1376 __ JumpIfSmi(edi, &miss);
1377
1378 // Goto miss case if we do not have a function.
1379 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
1380 __ j(not_equal, &miss);
1381
1382 // Make sure the function is not the Array() function, which requires special
1383 // behavior on MISS.
1384 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
1385 __ cmp(edi, ecx);
1386 __ j(equal, &miss);
1387
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001388 // Make sure the function belongs to the same native context.
1389 __ mov(ecx, FieldOperand(edi, JSFunction::kContextOffset));
1390 __ mov(ecx, ContextOperand(ecx, Context::NATIVE_CONTEXT_INDEX));
1391 __ cmp(ecx, NativeContextOperand());
1392 __ j(not_equal, &miss);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001393
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001394 // Initialize the call counter.
1395 __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
1396 FixedArray::kHeaderSize + kPointerSize),
Ben Murdoch61f157c2016-09-16 13:49:30 +01001397 Immediate(Smi::FromInt(1)));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001398
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001399 // Store the function. Use a stub since we need a frame for allocation.
1400 // ebx - vector
1401 // edx - slot
1402 // edi - function
1403 {
1404 FrameScope scope(masm, StackFrame::INTERNAL);
1405 CreateWeakCellStub create_stub(isolate);
1406 __ push(edi);
1407 __ CallStub(&create_stub);
1408 __ pop(edi);
1409 }
1410
1411 __ jmp(&call_function);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001412
1413 // We are here because tracing is on or we encountered a MISS case we can't
1414 // handle here.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001415 __ bind(&miss);
1416 GenerateMiss(masm);
1417
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001418 __ jmp(&call);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001419
1420 // Unreachable
1421 __ int3();
1422}
1423
1424
1425void CallICStub::GenerateMiss(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001426 FrameScope scope(masm, StackFrame::INTERNAL);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001427
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001428 // Push the function and feedback info.
1429 __ push(edi);
1430 __ push(ebx);
1431 __ push(edx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001432
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001433 // Call the entry.
1434 __ CallRuntime(Runtime::kCallIC_Miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001435
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001436 // Move result to edi and exit the internal frame.
1437 __ mov(edi, eax);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001438}
1439
1440
1441bool CEntryStub::NeedsImmovableCode() {
1442 return false;
1443}
1444
1445
1446void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
1447 CEntryStub::GenerateAheadOfTime(isolate);
1448 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
1449 StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
1450 // It is important that the store buffer overflow stubs are generated first.
Ben Murdoch61f157c2016-09-16 13:49:30 +01001451 CommonArrayConstructorStub::GenerateStubsAheadOfTime(isolate);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001452 CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001453 CreateWeakCellStub::GenerateAheadOfTime(isolate);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001454 BinaryOpICStub::GenerateAheadOfTime(isolate);
1455 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001456 StoreFastElementStub::GenerateAheadOfTime(isolate);
1457 TypeofStub::GenerateAheadOfTime(isolate);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001458}
1459
1460
1461void CodeStub::GenerateFPStubs(Isolate* isolate) {
1462 CEntryStub save_doubles(isolate, 1, kSaveFPRegs);
1463  // Stubs might already be in the snapshot; detect that and don't regenerate,
1464  // since regenerating would leave the code stub initialization state inconsistent.
1465 Code* save_doubles_code;
1466 if (!save_doubles.FindCodeInCache(&save_doubles_code)) {
1467 save_doubles_code = *(save_doubles.GetCode());
1468 }
1469 isolate->set_fp_stubs_generated(true);
1470}
1471
1472
1473void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
1474 CEntryStub stub(isolate, 1, kDontSaveFPRegs);
1475 stub.GetCode();
1476}
1477
1478
1479void CEntryStub::Generate(MacroAssembler* masm) {
1480 // eax: number of arguments including receiver
1481 // ebx: pointer to C function (C callee-saved)
1482 // ebp: frame pointer (restored after C call)
1483 // esp: stack pointer (restored after C call)
1484 // esi: current context (C callee-saved)
1485 // edi: JS function of the caller (C callee-saved)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001486 //
1487 // If argv_in_register():
1488 // ecx: pointer to the first argument
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001489
1490 ProfileEntryHookStub::MaybeCallEntryHook(masm);
1491
Ben Murdoch097c5b22016-05-18 11:27:45 +01001492 // Reserve space on the stack for the three arguments passed to the call. If
1493 // result size is greater than can be returned in registers, also reserve
1494 // space for the hidden argument for the result location, and space for the
1495 // result itself.
1496 int arg_stack_space = result_size() < 3 ? 3 : 4 + result_size();
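  // For example (a sketch of the layout reserved here, matching the stores and
  // loads below):
  //   result_size() <= 2 : 3 slots -> [argc][argv][isolate]
  //   result_size() == 3 : 7 slots -> [&result][argc][argv][isolate][r0][r1][r2]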
1497
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001498 // Enter the exit frame that transitions from JavaScript to C++.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001499 if (argv_in_register()) {
1500 DCHECK(!save_doubles());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001501 __ EnterApiExitFrame(arg_stack_space);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001502
1503 // Move argc and argv into the correct registers.
1504 __ mov(esi, ecx);
1505 __ mov(edi, eax);
1506 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001507 __ EnterExitFrame(arg_stack_space, save_doubles());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001508 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001509
1510 // ebx: pointer to C function (C callee-saved)
1511 // ebp: frame pointer (restored after C call)
1512 // esp: stack pointer (restored after C call)
1513 // edi: number of arguments including receiver (C callee-saved)
1514 // esi: pointer to the first argument (C callee-saved)
1515
1516 // Result returned in eax, or eax+edx if result size is 2.
1517
1518 // Check stack alignment.
1519 if (FLAG_debug_code) {
1520 __ CheckStackAlignment();
1521 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001522 // Call C function.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001523 if (result_size() <= 2) {
1524 __ mov(Operand(esp, 0 * kPointerSize), edi); // argc.
1525 __ mov(Operand(esp, 1 * kPointerSize), esi); // argv.
1526 __ mov(Operand(esp, 2 * kPointerSize),
1527 Immediate(ExternalReference::isolate_address(isolate())));
1528 } else {
1529 DCHECK_EQ(3, result_size());
1530 // Pass a pointer to the result location as the first argument.
1531 __ lea(eax, Operand(esp, 4 * kPointerSize));
1532 __ mov(Operand(esp, 0 * kPointerSize), eax);
1533 __ mov(Operand(esp, 1 * kPointerSize), edi); // argc.
1534 __ mov(Operand(esp, 2 * kPointerSize), esi); // argv.
1535 __ mov(Operand(esp, 3 * kPointerSize),
1536 Immediate(ExternalReference::isolate_address(isolate())));
1537 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001538 __ call(ebx);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001539
1540 if (result_size() > 2) {
1541 DCHECK_EQ(3, result_size());
1542#ifndef _WIN32
1543  // Restore the "hidden" result-pointer slot that the callee popped on return.
1544 __ sub(esp, Immediate(kPointerSize));
1545#endif
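  // (Sketch of why: under the System V ia32 convention assumed here, a callee
  //  returning a struct pops the hidden result pointer itself, i.e. it returns
  //  with "ret 4", so the caller re-adjusts esp; the Windows convention leaves
  //  that slot for the caller to clean up, hence no adjustment there.)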
1546 // Read result values stored on stack. Result is stored above the arguments.
1547 __ mov(kReturnRegister0, Operand(esp, 4 * kPointerSize));
1548 __ mov(kReturnRegister1, Operand(esp, 5 * kPointerSize));
1549 __ mov(kReturnRegister2, Operand(esp, 6 * kPointerSize));
1550 }
1551 // Result is in eax, edx:eax or edi:edx:eax - do not destroy these registers!
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001552
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001553 // Check result for exception sentinel.
1554 Label exception_returned;
1555 __ cmp(eax, isolate()->factory()->exception());
1556 __ j(equal, &exception_returned);
1557
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001558 // Check that there is no pending exception, otherwise we
1559 // should have returned the exception sentinel.
1560 if (FLAG_debug_code) {
1561 __ push(edx);
1562 __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
1563 Label okay;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001564 ExternalReference pending_exception_address(
1565 Isolate::kPendingExceptionAddress, isolate());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001566 __ cmp(edx, Operand::StaticVariable(pending_exception_address));
1567 // Cannot use check here as it attempts to generate call into runtime.
1568 __ j(equal, &okay, Label::kNear);
1569 __ int3();
1570 __ bind(&okay);
1571 __ pop(edx);
1572 }
1573
1574 // Exit the JavaScript to C++ exit frame.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001575 __ LeaveExitFrame(save_doubles(), !argv_in_register());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001576 __ ret(0);
1577
1578 // Handling of exception.
1579 __ bind(&exception_returned);
1580
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001581 ExternalReference pending_handler_context_address(
1582 Isolate::kPendingHandlerContextAddress, isolate());
1583 ExternalReference pending_handler_code_address(
1584 Isolate::kPendingHandlerCodeAddress, isolate());
1585 ExternalReference pending_handler_offset_address(
1586 Isolate::kPendingHandlerOffsetAddress, isolate());
1587 ExternalReference pending_handler_fp_address(
1588 Isolate::kPendingHandlerFPAddress, isolate());
1589 ExternalReference pending_handler_sp_address(
1590 Isolate::kPendingHandlerSPAddress, isolate());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001591
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001592 // Ask the runtime for help to determine the handler. This will set eax to
1593 // contain the current pending exception, don't clobber it.
1594 ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler,
1595 isolate());
1596 {
1597 FrameScope scope(masm, StackFrame::MANUAL);
1598 __ PrepareCallCFunction(3, eax);
1599 __ mov(Operand(esp, 0 * kPointerSize), Immediate(0)); // argc.
1600 __ mov(Operand(esp, 1 * kPointerSize), Immediate(0)); // argv.
1601 __ mov(Operand(esp, 2 * kPointerSize),
1602 Immediate(ExternalReference::isolate_address(isolate())));
1603 __ CallCFunction(find_handler, 3);
1604 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001605
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001606 // Retrieve the handler context, SP and FP.
1607 __ mov(esi, Operand::StaticVariable(pending_handler_context_address));
1608 __ mov(esp, Operand::StaticVariable(pending_handler_sp_address));
1609 __ mov(ebp, Operand::StaticVariable(pending_handler_fp_address));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001610
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001611 // If the handler is a JS frame, restore the context to the frame. Note that
1612  // the runtime sets the context (esi) to zero for non-JS frames.
1613 Label skip;
1614 __ test(esi, esi);
1615 __ j(zero, &skip, Label::kNear);
1616 __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
1617 __ bind(&skip);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001618
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001619 // Compute the handler entry address and jump to it.
1620 __ mov(edi, Operand::StaticVariable(pending_handler_code_address));
1621 __ mov(edx, Operand::StaticVariable(pending_handler_offset_address));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001622  // Check whether the handler is turbofanned code before jumping to it.
1623 Label not_turbo;
1624 __ push(eax);
1625 __ mov(eax, Operand(edi, Code::kKindSpecificFlags1Offset - kHeapObjectTag));
1626 __ and_(eax, Immediate(1 << Code::kIsTurbofannedBit));
1627 __ j(zero, &not_turbo);
1628 __ fninit();
1629 __ fld1();
1630 __ bind(&not_turbo);
1631 __ pop(eax);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001632 __ lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
1633 __ jmp(edi);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001634}
1635
1636
1637void JSEntryStub::Generate(MacroAssembler* masm) {
1638 Label invoke, handler_entry, exit;
1639 Label not_outermost_js, not_outermost_js_2;
1640
1641 ProfileEntryHookStub::MaybeCallEntryHook(masm);
1642
1643 // Set up frame.
1644 __ push(ebp);
1645 __ mov(ebp, esp);
1646
1647  // Push the frame type marker and the current context.
1648 int marker = type();
Ben Murdochda12d292016-06-02 14:46:10 +01001649 __ push(Immediate(Smi::FromInt(marker))); // marker
1650 ExternalReference context_address(Isolate::kContextAddress, isolate());
1651 __ push(Operand::StaticVariable(context_address)); // context
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001652 // Save callee-saved registers (C calling conventions).
1653 __ push(edi);
1654 __ push(esi);
1655 __ push(ebx);
1656
1657 // Save copies of the top frame descriptor on the stack.
1658 ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
1659 __ push(Operand::StaticVariable(c_entry_fp));
1660
1661 // If this is the outermost JS call, set js_entry_sp value.
1662 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
1663 __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
1664 __ j(not_equal, &not_outermost_js, Label::kNear);
1665 __ mov(Operand::StaticVariable(js_entry_sp), ebp);
1666 __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
1667 __ jmp(&invoke, Label::kNear);
1668 __ bind(&not_outermost_js);
1669 __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
1670
1671 // Jump to a faked try block that does the invoke, with a faked catch
1672 // block that sets the pending exception.
1673 __ jmp(&invoke);
1674 __ bind(&handler_entry);
1675 handler_offset_ = handler_entry.pos();
1676 // Caught exception: Store result (exception) in the pending exception
1677 // field in the JSEnv and return a failure sentinel.
1678 ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
1679 isolate());
1680 __ mov(Operand::StaticVariable(pending_exception), eax);
1681 __ mov(eax, Immediate(isolate()->factory()->exception()));
1682 __ jmp(&exit);
1683
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001684 // Invoke: Link this frame into the handler chain.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001685 __ bind(&invoke);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001686 __ PushStackHandler();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001687
1688 // Clear any pending exceptions.
1689 __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
1690 __ mov(Operand::StaticVariable(pending_exception), edx);
1691
1692 // Fake a receiver (NULL).
1693 __ push(Immediate(0)); // receiver
1694
1695  // Invoke the function by calling through the JS entry trampoline builtin and
1696  // pop the faked receiver when we return. Notice that we cannot store a
1697 // reference to the trampoline code directly in this stub, because the
1698 // builtin stubs may not have been generated yet.
1699 if (type() == StackFrame::ENTRY_CONSTRUCT) {
1700 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
1701 isolate());
1702 __ mov(edx, Immediate(construct_entry));
1703 } else {
1704 ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
1705 __ mov(edx, Immediate(entry));
1706 }
1707 __ mov(edx, Operand(edx, 0)); // deref address
1708 __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
1709 __ call(edx);
1710
1711 // Unlink this frame from the handler chain.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001712 __ PopStackHandler();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001713
1714 __ bind(&exit);
1715 // Check if the current stack frame is marked as the outermost JS frame.
1716 __ pop(ebx);
1717 __ cmp(ebx, Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
1718 __ j(not_equal, &not_outermost_js_2);
1719 __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
1720 __ bind(&not_outermost_js_2);
1721
1722 // Restore the top frame descriptor from the stack.
1723 __ pop(Operand::StaticVariable(ExternalReference(
1724 Isolate::kCEntryFPAddress, isolate())));
1725
1726 // Restore callee-saved registers (C calling conventions).
1727 __ pop(ebx);
1728 __ pop(esi);
1729 __ pop(edi);
1730  __ add(esp, Immediate(2 * kPointerSize));  // remove marker and saved context
1731
1732 // Restore frame pointer and return.
1733 __ pop(ebp);
1734 __ ret(0);
1735}
1736
1737
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001738// -------------------------------------------------------------------------
1739// StringCharCodeAtGenerator
1740
1741void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
1742  // If the receiver is a smi, trigger the non-string case.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001743 if (check_mode_ == RECEIVER_IS_UNKNOWN) {
1744 __ JumpIfSmi(object_, receiver_not_string_);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001745
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001746 // Fetch the instance type of the receiver into result register.
1747 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
1748 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
1749    // If the receiver is not a string, trigger the non-string case.
1750 __ test(result_, Immediate(kIsNotStringMask));
1751 __ j(not_zero, receiver_not_string_);
1752 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001753
1754  // If the index is not a smi, trigger the non-smi case.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001755 __ JumpIfNotSmi(index_, &index_not_smi_);
1756 __ bind(&got_smi_index_);
1757
1758 // Check for index out of range.
1759 __ cmp(index_, FieldOperand(object_, String::kLengthOffset));
1760 __ j(above_equal, index_out_of_range_);
1761
1762 __ SmiUntag(index_);
1763
1764 Factory* factory = masm->isolate()->factory();
1765 StringCharLoadGenerator::Generate(
1766 masm, factory, object_, index_, result_, &call_runtime_);
1767
1768 __ SmiTag(result_);
1769 __ bind(&exit_);
1770}
1771
1772
1773void StringCharCodeAtGenerator::GenerateSlow(
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001774 MacroAssembler* masm, EmbedMode embed_mode,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001775 const RuntimeCallHelper& call_helper) {
1776 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
1777
1778 // Index is not a smi.
1779 __ bind(&index_not_smi_);
1780 // If index is a heap number, try converting it to an integer.
1781 __ CheckMap(index_,
1782 masm->isolate()->factory()->heap_number_map(),
1783 index_not_number_,
1784 DONT_DO_SMI_CHECK);
1785 call_helper.BeforeCall(masm);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001786 if (embed_mode == PART_OF_IC_HANDLER) {
1787 __ push(LoadWithVectorDescriptor::VectorRegister());
1788 __ push(LoadDescriptor::SlotRegister());
1789 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001790 __ push(object_);
1791 __ push(index_); // Consumed by runtime conversion function.
Ben Murdoch61f157c2016-09-16 13:49:30 +01001792 __ CallRuntime(Runtime::kNumberToSmi);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001793 if (!index_.is(eax)) {
1794 // Save the conversion result before the pop instructions below
1795 // have a chance to overwrite it.
1796 __ mov(index_, eax);
1797 }
1798 __ pop(object_);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001799 if (embed_mode == PART_OF_IC_HANDLER) {
1800 __ pop(LoadDescriptor::SlotRegister());
1801 __ pop(LoadWithVectorDescriptor::VectorRegister());
1802 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001803 // Reload the instance type.
1804 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
1805 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
1806 call_helper.AfterCall(masm);
1807 // If index is still not a smi, it must be out of range.
1808 STATIC_ASSERT(kSmiTag == 0);
1809 __ JumpIfNotSmi(index_, index_out_of_range_);
1810 // Otherwise, return to the fast path.
1811 __ jmp(&got_smi_index_);
1812
1813 // Call runtime. We get here when the receiver is a string and the
1814  // index is a number, but the code for getting the actual character
1815 // is too complex (e.g., when the string needs to be flattened).
1816 __ bind(&call_runtime_);
1817 call_helper.BeforeCall(masm);
1818 __ push(object_);
1819 __ SmiTag(index_);
1820 __ push(index_);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001821 __ CallRuntime(Runtime::kStringCharCodeAtRT);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001822 if (!result_.is(eax)) {
1823 __ mov(result_, eax);
1824 }
1825 call_helper.AfterCall(masm);
1826 __ jmp(&exit_);
1827
1828 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
1829}
1830
1831
1832// -------------------------------------------------------------------------
1833// StringCharFromCodeGenerator
1834
1835void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
1836 // Fast case of Heap::LookupSingleCharacterStringFromCode.
1837 STATIC_ASSERT(kSmiTag == 0);
1838 STATIC_ASSERT(kSmiShiftSize == 0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001839 DCHECK(base::bits::IsPowerOfTwo32(String::kMaxOneByteCharCodeU + 1));
1840 __ test(code_, Immediate(kSmiTagMask |
1841 ((~String::kMaxOneByteCharCodeU) << kSmiTagSize)));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001842 __ j(not_zero, &slow_case_);
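  // Sketch of the combined check above (assuming kMaxOneByteCharCodeU == 0xff):
  // the mask is kSmiTagMask | (~0xff << kSmiTagSize), so a single 'test' rejects
  // both non-smis (tag bit set) and smis whose untagged value exceeds 0xff (a
  // high value bit set).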
1843
1844 Factory* factory = masm->isolate()->factory();
1845 __ Move(result_, Immediate(factory->single_character_string_cache()));
1846 STATIC_ASSERT(kSmiTag == 0);
1847 STATIC_ASSERT(kSmiTagSize == 1);
1848 STATIC_ASSERT(kSmiShiftSize == 0);
1849  // At this point the code register contains a smi-tagged one-byte char code.
1850 __ mov(result_, FieldOperand(result_,
1851 code_, times_half_pointer_size,
1852 FixedArray::kHeaderSize));
1853 __ cmp(result_, factory->undefined_value());
1854 __ j(equal, &slow_case_);
1855 __ bind(&exit_);
1856}
1857
1858
1859void StringCharFromCodeGenerator::GenerateSlow(
1860 MacroAssembler* masm,
1861 const RuntimeCallHelper& call_helper) {
1862 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
1863
1864 __ bind(&slow_case_);
1865 call_helper.BeforeCall(masm);
1866 __ push(code_);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001867 __ CallRuntime(Runtime::kStringCharFromCode);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001868 if (!result_.is(eax)) {
1869 __ mov(result_, eax);
1870 }
1871 call_helper.AfterCall(masm);
1872 __ jmp(&exit_);
1873
1874 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
1875}
1876
1877
1878void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
1879 Register dest,
1880 Register src,
1881 Register count,
1882 Register scratch,
1883 String::Encoding encoding) {
1884 DCHECK(!scratch.is(dest));
1885 DCHECK(!scratch.is(src));
1886 DCHECK(!scratch.is(count));
1887
1888 // Nothing to do for zero characters.
1889 Label done;
1890 __ test(count, count);
1891 __ j(zero, &done);
1892
1893 // Make count the number of bytes to copy.
1894 if (encoding == String::TWO_BYTE_ENCODING) {
1895 __ shl(count, 1);
1896 }
1897
1898 Label loop;
1899 __ bind(&loop);
1900 __ mov_b(scratch, Operand(src, 0));
1901 __ mov_b(Operand(dest, 0), scratch);
1902 __ inc(src);
1903 __ inc(dest);
1904 __ dec(count);
1905 __ j(not_zero, &loop);
1906
1907 __ bind(&done);
1908}
1909
1910
1911void SubStringStub::Generate(MacroAssembler* masm) {
1912 Label runtime;
1913
1914 // Stack frame on entry.
1915 // esp[0]: return address
1916 // esp[4]: to
1917 // esp[8]: from
1918 // esp[12]: string
1919
1920 // Make sure first argument is a string.
1921 __ mov(eax, Operand(esp, 3 * kPointerSize));
1922 STATIC_ASSERT(kSmiTag == 0);
1923 __ JumpIfSmi(eax, &runtime);
1924 Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
1925 __ j(NegateCondition(is_string), &runtime);
1926
1927 // eax: string
1928 // ebx: instance type
1929
1930 // Calculate length of sub string using the smi values.
1931 __ mov(ecx, Operand(esp, 1 * kPointerSize)); // To index.
1932 __ JumpIfNotSmi(ecx, &runtime);
1933 __ mov(edx, Operand(esp, 2 * kPointerSize)); // From index.
1934 __ JumpIfNotSmi(edx, &runtime);
1935 __ sub(ecx, edx);
1936 __ cmp(ecx, FieldOperand(eax, String::kLengthOffset));
1937 Label not_original_string;
1938 // Shorter than original string's length: an actual substring.
1939 __ j(below, &not_original_string, Label::kNear);
1940 // Longer than original string's length or negative: unsafe arguments.
1941 __ j(above, &runtime);
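  // (Sketch: to and from are smis, so ecx = to - from is still a valid smi; the
  //  unsigned 'above' branch also catches a negative length, which compares as a
  //  huge unsigned value against the string length.)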
1942 // Return original string.
1943 Counters* counters = isolate()->counters();
1944 __ IncrementCounter(counters->sub_string_native(), 1);
1945 __ ret(3 * kPointerSize);
1946 __ bind(&not_original_string);
1947
1948 Label single_char;
1949 __ cmp(ecx, Immediate(Smi::FromInt(1)));
1950 __ j(equal, &single_char);
1951
1952 // eax: string
1953 // ebx: instance type
1954 // ecx: sub string length (smi)
1955 // edx: from index (smi)
1956 // Deal with different string types: update the index if necessary
1957 // and put the underlying string into edi.
1958 Label underlying_unpacked, sliced_string, seq_or_external_string;
1959 // If the string is not indirect, it can only be sequential or external.
1960 STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
1961 STATIC_ASSERT(kIsIndirectStringMask != 0);
1962 __ test(ebx, Immediate(kIsIndirectStringMask));
1963 __ j(zero, &seq_or_external_string, Label::kNear);
1964
1965 Factory* factory = isolate()->factory();
1966 __ test(ebx, Immediate(kSlicedNotConsMask));
1967 __ j(not_zero, &sliced_string, Label::kNear);
1968 // Cons string. Check whether it is flat, then fetch first part.
1969 // Flat cons strings have an empty second part.
1970 __ cmp(FieldOperand(eax, ConsString::kSecondOffset),
1971 factory->empty_string());
1972 __ j(not_equal, &runtime);
1973 __ mov(edi, FieldOperand(eax, ConsString::kFirstOffset));
1974 // Update instance type.
1975 __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
1976 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
1977 __ jmp(&underlying_unpacked, Label::kNear);
1978
1979 __ bind(&sliced_string);
1980 // Sliced string. Fetch parent and adjust start index by offset.
1981 __ add(edx, FieldOperand(eax, SlicedString::kOffsetOffset));
1982 __ mov(edi, FieldOperand(eax, SlicedString::kParentOffset));
1983 // Update instance type.
1984 __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
1985 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
1986 __ jmp(&underlying_unpacked, Label::kNear);
1987
1988 __ bind(&seq_or_external_string);
1989 // Sequential or external string. Just move string to the expected register.
1990 __ mov(edi, eax);
1991
1992 __ bind(&underlying_unpacked);
1993
1994 if (FLAG_string_slices) {
1995 Label copy_routine;
1996 // edi: underlying subject string
1997 // ebx: instance type of underlying subject string
1998 // edx: adjusted start index (smi)
1999 // ecx: length (smi)
2000 __ cmp(ecx, Immediate(Smi::FromInt(SlicedString::kMinLength)));
2001 // Short slice. Copy instead of slicing.
2002 __ j(less, &copy_routine);
2003 // Allocate new sliced string. At this point we do not reload the instance
2004 // type including the string encoding because we simply rely on the info
2005 // provided by the original string. It does not matter if the original
2006    // string's encoding is wrong because we always have to recheck the encoding
2007    // of the newly created string's parent anyway, due to externalized strings.
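    // (Result sketch: a SlicedString is only the triple {parent, offset, length}
    //  filled in below from edi, edx and ecx, so no characters are copied for
    //  slices of at least SlicedString::kMinLength characters.)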
2008 Label two_byte_slice, set_slice_header;
2009 STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
2010 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
2011 __ test(ebx, Immediate(kStringEncodingMask));
2012 __ j(zero, &two_byte_slice, Label::kNear);
2013 __ AllocateOneByteSlicedString(eax, ebx, no_reg, &runtime);
2014 __ jmp(&set_slice_header, Label::kNear);
2015 __ bind(&two_byte_slice);
2016 __ AllocateTwoByteSlicedString(eax, ebx, no_reg, &runtime);
2017 __ bind(&set_slice_header);
2018 __ mov(FieldOperand(eax, SlicedString::kLengthOffset), ecx);
2019 __ mov(FieldOperand(eax, SlicedString::kHashFieldOffset),
2020 Immediate(String::kEmptyHashField));
2021 __ mov(FieldOperand(eax, SlicedString::kParentOffset), edi);
2022 __ mov(FieldOperand(eax, SlicedString::kOffsetOffset), edx);
2023 __ IncrementCounter(counters->sub_string_native(), 1);
2024 __ ret(3 * kPointerSize);
2025
2026 __ bind(&copy_routine);
2027 }
2028
2029 // edi: underlying subject string
2030 // ebx: instance type of underlying subject string
2031 // edx: adjusted start index (smi)
2032 // ecx: length (smi)
2033 // The subject string can only be external or sequential string of either
2034 // encoding at this point.
2035 Label two_byte_sequential, runtime_drop_two, sequential_string;
2036 STATIC_ASSERT(kExternalStringTag != 0);
2037 STATIC_ASSERT(kSeqStringTag == 0);
Ben Murdochda12d292016-06-02 14:46:10 +01002038 __ test_b(ebx, Immediate(kExternalStringTag));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002039 __ j(zero, &sequential_string);
2040
2041 // Handle external string.
2042 // Rule out short external strings.
2043 STATIC_ASSERT(kShortExternalStringTag != 0);
Ben Murdochda12d292016-06-02 14:46:10 +01002044 __ test_b(ebx, Immediate(kShortExternalStringMask));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002045 __ j(not_zero, &runtime);
2046 __ mov(edi, FieldOperand(edi, ExternalString::kResourceDataOffset));
2047 // Move the pointer so that offset-wise, it looks like a sequential string.
2048 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
2049 __ sub(edi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
2050
2051 __ bind(&sequential_string);
2052 // Stash away (adjusted) index and (underlying) string.
2053 __ push(edx);
2054 __ push(edi);
2055 __ SmiUntag(ecx);
2056 STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
Ben Murdochda12d292016-06-02 14:46:10 +01002057 __ test_b(ebx, Immediate(kStringEncodingMask));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002058 __ j(zero, &two_byte_sequential);
2059
2060 // Sequential one byte string. Allocate the result.
2061 __ AllocateOneByteString(eax, ecx, ebx, edx, edi, &runtime_drop_two);
2062
2063 // eax: result string
2064 // ecx: result string length
2065 // Locate first character of result.
2066 __ mov(edi, eax);
2067 __ add(edi, Immediate(SeqOneByteString::kHeaderSize - kHeapObjectTag));
2068 // Load string argument and locate character of sub string start.
2069 __ pop(edx);
2070 __ pop(ebx);
2071 __ SmiUntag(ebx);
2072 __ lea(edx, FieldOperand(edx, ebx, times_1, SeqOneByteString::kHeaderSize));
2073
2074 // eax: result string
2075 // ecx: result length
2076 // edi: first character of result
2077 // edx: character of sub string start
2078 StringHelper::GenerateCopyCharacters(
2079 masm, edi, edx, ecx, ebx, String::ONE_BYTE_ENCODING);
2080 __ IncrementCounter(counters->sub_string_native(), 1);
2081 __ ret(3 * kPointerSize);
2082
2083 __ bind(&two_byte_sequential);
2084 // Sequential two-byte string. Allocate the result.
2085 __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime_drop_two);
2086
2087 // eax: result string
2088 // ecx: result string length
2089 // Locate first character of result.
2090 __ mov(edi, eax);
2091 __ add(edi,
2092 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
2093 // Load string argument and locate character of sub string start.
2094 __ pop(edx);
2095  // Since the from index is a smi, it is already twice the untagged value, which
2096  // matches the size of a two-byte character.
2097 // byte character.
2098 STATIC_ASSERT(kSmiTag == 0);
2099 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
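  // Worked example: from = 5 is stored as the smi 5 << 1 = 10, and character 5
  // of a two-byte string starts at byte offset 5 * 2 = 10, so the smi value can
  // be used directly with times_1 scaling in the lea below.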
2100 __ lea(edx, FieldOperand(edx, ebx, times_1, SeqTwoByteString::kHeaderSize));
2101
2102 // eax: result string
2103 // ecx: result length
2104 // edi: first character of result
2105 // edx: character of sub string start
2106 StringHelper::GenerateCopyCharacters(
2107 masm, edi, edx, ecx, ebx, String::TWO_BYTE_ENCODING);
2108 __ IncrementCounter(counters->sub_string_native(), 1);
2109 __ ret(3 * kPointerSize);
2110
2111 // Drop pushed values on the stack before tail call.
2112 __ bind(&runtime_drop_two);
2113 __ Drop(2);
2114
2115 // Just jump to runtime to create the sub string.
2116 __ bind(&runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002117 __ TailCallRuntime(Runtime::kSubString);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002118
2119 __ bind(&single_char);
2120 // eax: string
2121 // ebx: instance type
2122 // ecx: sub string length (smi)
2123 // edx: from index (smi)
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002124 StringCharAtGenerator generator(eax, edx, ecx, eax, &runtime, &runtime,
Ben Murdoch61f157c2016-09-16 13:49:30 +01002125 &runtime, RECEIVER_IS_STRING);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002126 generator.GenerateFast(masm);
2127 __ ret(3 * kPointerSize);
2128 generator.SkipSlow(masm, &runtime);
2129}
2130
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002131void ToStringStub::Generate(MacroAssembler* masm) {
2132 // The ToString stub takes one argument in eax.
2133 Label is_number;
2134 __ JumpIfSmi(eax, &is_number, Label::kNear);
2135
2136 Label not_string;
2137 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edi);
2138 // eax: receiver
2139 // edi: receiver map
2140 __ j(above_equal, &not_string, Label::kNear);
2141 __ Ret();
2142 __ bind(&not_string);
2143
2144 Label not_heap_number;
2145 __ CompareMap(eax, masm->isolate()->factory()->heap_number_map());
2146 __ j(not_equal, &not_heap_number, Label::kNear);
2147 __ bind(&is_number);
2148 NumberToStringStub stub(isolate());
2149 __ TailCallStub(&stub);
2150 __ bind(&not_heap_number);
2151
2152 Label not_oddball;
2153 __ CmpInstanceType(edi, ODDBALL_TYPE);
2154 __ j(not_equal, &not_oddball, Label::kNear);
2155 __ mov(eax, FieldOperand(eax, Oddball::kToStringOffset));
2156 __ Ret();
2157 __ bind(&not_oddball);
2158
2159 __ pop(ecx); // Pop return address.
2160 __ push(eax); // Push argument.
2161 __ push(ecx); // Push return address.
2162 __ TailCallRuntime(Runtime::kToString);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002163}
2164
2165
Ben Murdoch097c5b22016-05-18 11:27:45 +01002166void ToNameStub::Generate(MacroAssembler* masm) {
2167 // The ToName stub takes one argument in eax.
2168 Label is_number;
2169 __ JumpIfSmi(eax, &is_number, Label::kNear);
2170
2171 Label not_name;
2172 STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
2173 __ CmpObjectType(eax, LAST_NAME_TYPE, edi);
2174 // eax: receiver
2175 // edi: receiver map
2176 __ j(above, &not_name, Label::kNear);
2177 __ Ret();
2178 __ bind(&not_name);
2179
2180 Label not_heap_number;
2181 __ CompareMap(eax, masm->isolate()->factory()->heap_number_map());
2182 __ j(not_equal, &not_heap_number, Label::kNear);
2183 __ bind(&is_number);
2184 NumberToStringStub stub(isolate());
2185 __ TailCallStub(&stub);
2186 __ bind(&not_heap_number);
2187
2188 Label not_oddball;
2189 __ CmpInstanceType(edi, ODDBALL_TYPE);
2190 __ j(not_equal, &not_oddball, Label::kNear);
2191 __ mov(eax, FieldOperand(eax, Oddball::kToStringOffset));
2192 __ Ret();
2193 __ bind(&not_oddball);
2194
2195 __ pop(ecx); // Pop return address.
2196 __ push(eax); // Push argument.
2197 __ push(ecx); // Push return address.
2198 __ TailCallRuntime(Runtime::kToName);
2199}
2200
2201
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002202void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
2203 Register left,
2204 Register right,
2205 Register scratch1,
2206 Register scratch2) {
2207 Register length = scratch1;
2208
2209 // Compare lengths.
2210 Label strings_not_equal, check_zero_length;
2211 __ mov(length, FieldOperand(left, String::kLengthOffset));
2212 __ cmp(length, FieldOperand(right, String::kLengthOffset));
2213 __ j(equal, &check_zero_length, Label::kNear);
2214 __ bind(&strings_not_equal);
2215 __ Move(eax, Immediate(Smi::FromInt(NOT_EQUAL)));
2216 __ ret(0);
2217
2218 // Check if the length is zero.
2219 Label compare_chars;
2220 __ bind(&check_zero_length);
2221 STATIC_ASSERT(kSmiTag == 0);
2222 __ test(length, length);
2223 __ j(not_zero, &compare_chars, Label::kNear);
2224 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
2225 __ ret(0);
2226
2227 // Compare characters.
2228 __ bind(&compare_chars);
2229 GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2,
2230 &strings_not_equal, Label::kNear);
2231
2232 // Characters are equal.
2233 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
2234 __ ret(0);
2235}
2236
2237
2238void StringHelper::GenerateCompareFlatOneByteStrings(
2239 MacroAssembler* masm, Register left, Register right, Register scratch1,
2240 Register scratch2, Register scratch3) {
2241 Counters* counters = masm->isolate()->counters();
2242 __ IncrementCounter(counters->string_compare_native(), 1);
2243
2244 // Find minimum length.
2245 Label left_shorter;
2246 __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
2247 __ mov(scratch3, scratch1);
2248 __ sub(scratch3, FieldOperand(right, String::kLengthOffset));
2249
2250 Register length_delta = scratch3;
2251
2252 __ j(less_equal, &left_shorter, Label::kNear);
2253 // Right string is shorter. Change scratch1 to be length of right string.
2254 __ sub(scratch1, length_delta);
2255 __ bind(&left_shorter);
2256
2257 Register min_length = scratch1;
2258
2259 // If either length is zero, just compare lengths.
2260 Label compare_lengths;
2261 __ test(min_length, min_length);
2262 __ j(zero, &compare_lengths, Label::kNear);
2263
2264 // Compare characters.
2265 Label result_not_equal;
2266 GenerateOneByteCharsCompareLoop(masm, left, right, min_length, scratch2,
2267 &result_not_equal, Label::kNear);
2268
2269 // Compare lengths - strings up to min-length are equal.
2270 __ bind(&compare_lengths);
2271 __ test(length_delta, length_delta);
2272 Label length_not_equal;
2273 __ j(not_zero, &length_not_equal, Label::kNear);
2274
2275 // Result is EQUAL.
2276 STATIC_ASSERT(EQUAL == 0);
2277 STATIC_ASSERT(kSmiTag == 0);
2278 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
2279 __ ret(0);
2280
2281 Label result_greater;
2282 Label result_less;
2283 __ bind(&length_not_equal);
2284 __ j(greater, &result_greater, Label::kNear);
2285 __ jmp(&result_less, Label::kNear);
2286 __ bind(&result_not_equal);
2287 __ j(above, &result_greater, Label::kNear);
2288 __ bind(&result_less);
2289
2290 // Result is LESS.
2291 __ Move(eax, Immediate(Smi::FromInt(LESS)));
2292 __ ret(0);
2293
2294 // Result is GREATER.
2295 __ bind(&result_greater);
2296 __ Move(eax, Immediate(Smi::FromInt(GREATER)));
2297 __ ret(0);
2298}
2299
2300
2301void StringHelper::GenerateOneByteCharsCompareLoop(
2302 MacroAssembler* masm, Register left, Register right, Register length,
2303 Register scratch, Label* chars_not_equal,
2304 Label::Distance chars_not_equal_near) {
2305  // Change the index to run from -length to -1 by adding length to the string
2306  // start. The loop then ends when the index reaches zero, so no additional
2307  // compare is needed.
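  // Roughly equivalent C++ (a sketch; lp, rp and len stand for the untagged
  // character pointers and length, not the literal registers used here):
  //   const uint8_t* lp_end = lp + len;
  //   const uint8_t* rp_end = rp + len;
  //   for (intptr_t i = -len; i != 0; ++i) {  // i hits 0 exactly at the end
  //     if (lp_end[i] != rp_end[i]) goto chars_not_equal;
  //   }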
2308 __ SmiUntag(length);
2309 __ lea(left,
2310 FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
2311 __ lea(right,
2312 FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
2313 __ neg(length);
2314 Register index = length; // index = -length;
2315
2316 // Compare loop.
2317 Label loop;
2318 __ bind(&loop);
2319 __ mov_b(scratch, Operand(left, index, times_1, 0));
2320 __ cmpb(scratch, Operand(right, index, times_1, 0));
2321 __ j(not_equal, chars_not_equal, chars_not_equal_near);
2322 __ inc(index);
2323 __ j(not_zero, &loop);
2324}
2325
2326
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002327void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
2328 // ----------- S t a t e -------------
2329 // -- edx : left
2330 // -- eax : right
2331 // -- esp[0] : return address
2332 // -----------------------------------
2333
2334 // Load ecx with the allocation site. We stick an undefined dummy value here
2335 // and replace it with the real allocation site later when we instantiate this
2336 // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
Ben Murdoch61f157c2016-09-16 13:49:30 +01002337 __ mov(ecx, isolate()->factory()->undefined_value());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002338
2339 // Make sure that we actually patched the allocation site.
2340 if (FLAG_debug_code) {
2341 __ test(ecx, Immediate(kSmiTagMask));
2342 __ Assert(not_equal, kExpectedAllocationSite);
2343 __ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
2344 isolate()->factory()->allocation_site_map());
2345 __ Assert(equal, kExpectedAllocationSite);
2346 }
2347
2348 // Tail call into the stub that handles binary operations with allocation
2349 // sites.
2350 BinaryOpWithAllocationSiteStub stub(isolate(), state());
2351 __ TailCallStub(&stub);
2352}
2353
2354
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002355void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
2356 DCHECK_EQ(CompareICState::BOOLEAN, state());
2357 Label miss;
2358 Label::Distance const miss_distance =
2359 masm->emit_debug_code() ? Label::kFar : Label::kNear;
2360
2361 __ JumpIfSmi(edx, &miss, miss_distance);
2362 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
2363 __ JumpIfSmi(eax, &miss, miss_distance);
2364 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2365 __ JumpIfNotRoot(ecx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
2366 __ JumpIfNotRoot(ebx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002367 if (!Token::IsEqualityOp(op())) {
2368 __ mov(eax, FieldOperand(eax, Oddball::kToNumberOffset));
2369 __ AssertSmi(eax);
2370 __ mov(edx, FieldOperand(edx, Oddball::kToNumberOffset));
2371 __ AssertSmi(edx);
2372 __ xchg(eax, edx);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002373 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01002374 __ sub(eax, edx);
2375 __ Ret();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002376
2377 __ bind(&miss);
2378 GenerateMiss(masm);
2379}
2380
2381
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002382void CompareICStub::GenerateSmis(MacroAssembler* masm) {
2383 DCHECK(state() == CompareICState::SMI);
2384 Label miss;
2385 __ mov(ecx, edx);
2386 __ or_(ecx, eax);
2387 __ JumpIfNotSmi(ecx, &miss, Label::kNear);
2388
2389 if (GetCondition() == equal) {
2390 // For equality we do not care about the sign of the result.
2391 __ sub(eax, edx);
2392 } else {
2393 Label done;
2394 __ sub(edx, eax);
2395 __ j(no_overflow, &done, Label::kNear);
2396 // Correct sign of result in case of overflow.
2397 __ not_(edx);
2398 __ bind(&done);
2399 __ mov(eax, edx);
2400 }
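  // Worked example of the overflow fix-up above (illustrative): with 31-bit
  // smis, left = Smi(0x3fffffff) and right = Smi(-1) give edx - eax = 0x80000000,
  // which looks negative although the true difference is positive. On overflow
  // the sign bit is exactly wrong, and only the sign is consumed by the
  // comparison, so 'not edx' yields a correctly-signed result.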
2401 __ ret(0);
2402
2403 __ bind(&miss);
2404 GenerateMiss(masm);
2405}
2406
2407
2408void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
2409 DCHECK(state() == CompareICState::NUMBER);
2410
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002411 Label generic_stub, check_left;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002412 Label unordered, maybe_undefined1, maybe_undefined2;
2413 Label miss;
2414
2415 if (left() == CompareICState::SMI) {
2416 __ JumpIfNotSmi(edx, &miss);
2417 }
2418 if (right() == CompareICState::SMI) {
2419 __ JumpIfNotSmi(eax, &miss);
2420 }
2421
2422 // Inlining the double comparison and falling back to the general compare
2423 // stub if NaN is involved or SSE2 or CMOV is unsupported.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002424 __ JumpIfSmi(eax, &check_left, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002425 __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
2426 isolate()->factory()->heap_number_map());
2427 __ j(not_equal, &maybe_undefined1, Label::kNear);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002428
2429 __ bind(&check_left);
2430 __ JumpIfSmi(edx, &generic_stub, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002431 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
2432 isolate()->factory()->heap_number_map());
2433 __ j(not_equal, &maybe_undefined2, Label::kNear);
2434
2435 __ bind(&unordered);
2436 __ bind(&generic_stub);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002437 CompareICStub stub(isolate(), op(), CompareICState::GENERIC,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002438 CompareICState::GENERIC, CompareICState::GENERIC);
2439 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
2440
2441 __ bind(&maybe_undefined1);
2442 if (Token::IsOrderedRelationalCompareOp(op())) {
2443 __ cmp(eax, Immediate(isolate()->factory()->undefined_value()));
2444 __ j(not_equal, &miss);
2445 __ JumpIfSmi(edx, &unordered);
2446 __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
2447 __ j(not_equal, &maybe_undefined2, Label::kNear);
2448 __ jmp(&unordered);
2449 }
2450
2451 __ bind(&maybe_undefined2);
2452 if (Token::IsOrderedRelationalCompareOp(op())) {
2453 __ cmp(edx, Immediate(isolate()->factory()->undefined_value()));
2454 __ j(equal, &unordered);
2455 }
2456
2457 __ bind(&miss);
2458 GenerateMiss(masm);
2459}
2460
2461
2462void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
2463 DCHECK(state() == CompareICState::INTERNALIZED_STRING);
2464 DCHECK(GetCondition() == equal);
2465
2466 // Registers containing left and right operands respectively.
2467 Register left = edx;
2468 Register right = eax;
2469 Register tmp1 = ecx;
2470 Register tmp2 = ebx;
2471
2472 // Check that both operands are heap objects.
2473 Label miss;
2474 __ mov(tmp1, left);
2475 STATIC_ASSERT(kSmiTag == 0);
2476 __ and_(tmp1, right);
2477 __ JumpIfSmi(tmp1, &miss, Label::kNear);
2478
2479 // Check that both operands are internalized strings.
2480 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
2481 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
2482 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
2483 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
2484 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
2485 __ or_(tmp1, tmp2);
2486 __ test(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
2487 __ j(not_zero, &miss, Label::kNear);
2488
2489 // Internalized strings are compared by identity.
2490 Label done;
2491 __ cmp(left, right);
2492 // Make sure eax is non-zero. At this point input operands are
2493 // guaranteed to be non-zero.
2494 DCHECK(right.is(eax));
2495 __ j(not_equal, &done, Label::kNear);
2496 STATIC_ASSERT(EQUAL == 0);
2497 STATIC_ASSERT(kSmiTag == 0);
2498 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
2499 __ bind(&done);
2500 __ ret(0);
2501
2502 __ bind(&miss);
2503 GenerateMiss(masm);
2504}
2505
2506
2507void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
2508 DCHECK(state() == CompareICState::UNIQUE_NAME);
2509 DCHECK(GetCondition() == equal);
2510
2511 // Registers containing left and right operands respectively.
2512 Register left = edx;
2513 Register right = eax;
2514 Register tmp1 = ecx;
2515 Register tmp2 = ebx;
2516
2517 // Check that both operands are heap objects.
2518 Label miss;
2519 __ mov(tmp1, left);
2520 STATIC_ASSERT(kSmiTag == 0);
2521 __ and_(tmp1, right);
2522 __ JumpIfSmi(tmp1, &miss, Label::kNear);
2523
2524 // Check that both operands are unique names. This leaves the instance
2525 // types loaded in tmp1 and tmp2.
2526 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
2527 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
2528 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
2529 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
2530
2531 __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear);
2532 __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear);
2533
2534 // Unique names are compared by identity.
2535 Label done;
2536 __ cmp(left, right);
2537 // Make sure eax is non-zero. At this point input operands are
2538 // guaranteed to be non-zero.
2539 DCHECK(right.is(eax));
2540 __ j(not_equal, &done, Label::kNear);
2541 STATIC_ASSERT(EQUAL == 0);
2542 STATIC_ASSERT(kSmiTag == 0);
2543 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
2544 __ bind(&done);
2545 __ ret(0);
2546
2547 __ bind(&miss);
2548 GenerateMiss(masm);
2549}
2550
2551
2552void CompareICStub::GenerateStrings(MacroAssembler* masm) {
2553 DCHECK(state() == CompareICState::STRING);
2554 Label miss;
2555
2556 bool equality = Token::IsEqualityOp(op());
2557
2558 // Registers containing left and right operands respectively.
2559 Register left = edx;
2560 Register right = eax;
2561 Register tmp1 = ecx;
2562 Register tmp2 = ebx;
2563 Register tmp3 = edi;
2564
2565 // Check that both operands are heap objects.
2566 __ mov(tmp1, left);
2567 STATIC_ASSERT(kSmiTag == 0);
2568 __ and_(tmp1, right);
2569 __ JumpIfSmi(tmp1, &miss);
2570
2571 // Check that both operands are strings. This leaves the instance
2572 // types loaded in tmp1 and tmp2.
2573 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
2574 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
2575 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
2576 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
2577 __ mov(tmp3, tmp1);
2578 STATIC_ASSERT(kNotStringTag != 0);
2579 __ or_(tmp3, tmp2);
2580 __ test(tmp3, Immediate(kIsNotStringMask));
2581 __ j(not_zero, &miss);
2582
2583 // Fast check for identical strings.
2584 Label not_same;
2585 __ cmp(left, right);
2586 __ j(not_equal, &not_same, Label::kNear);
2587 STATIC_ASSERT(EQUAL == 0);
2588 STATIC_ASSERT(kSmiTag == 0);
2589 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
2590 __ ret(0);
2591
2592 // Handle not identical strings.
2593 __ bind(&not_same);
2594
2595 // Check that both strings are internalized. If they are, we're done
2596 // because we already know they are not identical. But in the case of
2597 // non-equality compare, we still need to determine the order. We
2598 // also know they are both strings.
2599 if (equality) {
2600 Label do_compare;
2601 STATIC_ASSERT(kInternalizedTag == 0);
2602 __ or_(tmp1, tmp2);
2603 __ test(tmp1, Immediate(kIsNotInternalizedMask));
2604 __ j(not_zero, &do_compare, Label::kNear);
2605 // Make sure eax is non-zero. At this point input operands are
2606 // guaranteed to be non-zero.
2607 DCHECK(right.is(eax));
2608 __ ret(0);
2609 __ bind(&do_compare);
2610 }
2611
2612 // Check that both strings are sequential one-byte.
2613 Label runtime;
2614 __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime);
2615
2616 // Compare flat one byte strings. Returns when done.
2617 if (equality) {
2618 StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1,
2619 tmp2);
2620 } else {
2621 StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1,
2622 tmp2, tmp3);
2623 }
2624
2625 // Handle more complex cases in runtime.
2626 __ bind(&runtime);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002627 if (equality) {
Ben Murdochda12d292016-06-02 14:46:10 +01002628 {
2629 FrameScope scope(masm, StackFrame::INTERNAL);
2630 __ Push(left);
2631 __ Push(right);
2632 __ CallRuntime(Runtime::kStringEqual);
2633 }
2634 __ sub(eax, Immediate(masm->isolate()->factory()->true_value()));
2635 __ Ret();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002636 } else {
Ben Murdochda12d292016-06-02 14:46:10 +01002637 __ pop(tmp1); // Return address.
2638 __ push(left);
2639 __ push(right);
2640 __ push(tmp1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002641 __ TailCallRuntime(Runtime::kStringCompare);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002642 }
2643
2644 __ bind(&miss);
2645 GenerateMiss(masm);
2646}
2647
2648
2649void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
2650 DCHECK_EQ(CompareICState::RECEIVER, state());
2651 Label miss;
2652 __ mov(ecx, edx);
2653 __ and_(ecx, eax);
2654 __ JumpIfSmi(ecx, &miss, Label::kNear);
2655
2656 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
2657 __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
2658 __ j(below, &miss, Label::kNear);
2659 __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ecx);
2660 __ j(below, &miss, Label::kNear);
2661
2662 DCHECK_EQ(equal, GetCondition());
2663 __ sub(eax, edx);
2664 __ ret(0);
2665
2666 __ bind(&miss);
2667 GenerateMiss(masm);
2668}
2669
2670
2671void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) {
2672 Label miss;
2673 Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
2674 __ mov(ecx, edx);
2675 __ and_(ecx, eax);
2676 __ JumpIfSmi(ecx, &miss, Label::kNear);
2677
2678 __ GetWeakValue(edi, cell);
2679 __ cmp(edi, FieldOperand(eax, HeapObject::kMapOffset));
2680 __ j(not_equal, &miss, Label::kNear);
2681 __ cmp(edi, FieldOperand(edx, HeapObject::kMapOffset));
2682 __ j(not_equal, &miss, Label::kNear);
2683
2684 if (Token::IsEqualityOp(op())) {
2685 __ sub(eax, edx);
2686 __ ret(0);
2687 } else {
2688 __ PopReturnAddressTo(ecx);
2689 __ Push(edx);
2690 __ Push(eax);
2691 __ Push(Immediate(Smi::FromInt(NegativeComparisonResult(GetCondition()))));
2692 __ PushReturnAddressFrom(ecx);
2693 __ TailCallRuntime(Runtime::kCompare);
2694 }
2695
2696 __ bind(&miss);
2697 GenerateMiss(masm);
2698}
2699
2700
2701void CompareICStub::GenerateMiss(MacroAssembler* masm) {
2702 {
2703 // Call the runtime system in a fresh internal frame.
2704 FrameScope scope(masm, StackFrame::INTERNAL);
2705 __ push(edx); // Preserve edx and eax.
2706 __ push(eax);
2707 __ push(edx); // And also use them as the arguments.
2708 __ push(eax);
2709 __ push(Immediate(Smi::FromInt(op())));
2710 __ CallRuntime(Runtime::kCompareIC_Miss);
2711 // Compute the entry point of the rewritten stub.
2712 __ lea(edi, FieldOperand(eax, Code::kHeaderSize));
2713 __ pop(eax);
2714 __ pop(edx);
2715 }
2716
2717 // Do a tail call to the rewritten stub.
2718 __ jmp(edi);
2719}
2720
2721
2722// Helper function used to check that the dictionary doesn't contain
2723// the property. This function may return false negatives, so miss_label
2724// must always call a backup property check that is complete.
2725// This function is safe to call if the receiver has fast properties.
2726// Name must be a unique name and receiver must be a heap object.
2727void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
2728 Label* miss,
2729 Label* done,
2730 Register properties,
2731 Handle<Name> name,
2732 Register r0) {
2733 DCHECK(name->IsUniqueName());
2734
2735 // If names of slots in range from 1 to kProbes - 1 for the hash value are
2736 // not equal to the name and kProbes-th slot is not used (its name is the
2737 // undefined value), it guarantees the hash table doesn't contain the
2738 // property. It's true even if some slots represent deleted properties
2739 // (their names are the hole value).
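  // As a concrete illustration of the probe formula used below,
  // (hash + i + i*i) & mask: for i = 0, 1, 2, 3 the offsets added to the hash
  // are 0, 2, 6 and 12, so successive probes spread out quadratically rather
  // than scanning adjacent slots.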
2740 for (int i = 0; i < kInlinedProbes; i++) {
2741 // Compute the masked index: (hash + i + i * i) & mask.
2742 Register index = r0;
2743 // Capacity is smi 2^n.
2744 __ mov(index, FieldOperand(properties, kCapacityOffset));
2745 __ dec(index);
2746 __ and_(index,
2747 Immediate(Smi::FromInt(name->Hash() +
2748 NameDictionary::GetProbeOffset(i))));
2749
2750 // Scale the index by multiplying by the entry size.
2751 STATIC_ASSERT(NameDictionary::kEntrySize == 3);
2752 __ lea(index, Operand(index, index, times_2, 0)); // index *= 3.
2753 Register entity_name = r0;
2754 // Having undefined at this place means the name is not contained.
2755 STATIC_ASSERT(kSmiTagSize == 1);
2756 __ mov(entity_name, Operand(properties, index, times_half_pointer_size,
2757 kElementsStartOffset - kHeapObjectTag));
2758 __ cmp(entity_name, masm->isolate()->factory()->undefined_value());
2759 __ j(equal, done);
2760
2761 // Stop if we found the property.
2762 __ cmp(entity_name, Handle<Name>(name));
2763 __ j(equal, miss);
2764
2765 Label good;
2766 // Check for the hole and skip.
2767 __ cmp(entity_name, masm->isolate()->factory()->the_hole_value());
2768 __ j(equal, &good, Label::kNear);
2769
2770 // Check if the entry name is not a unique name.
2771 __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
2772 __ JumpIfNotUniqueNameInstanceType(
2773 FieldOperand(entity_name, Map::kInstanceTypeOffset), miss);
2774 __ bind(&good);
2775 }
2776
2777 NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
2778 NEGATIVE_LOOKUP);
2779 __ push(Immediate(Handle<Object>(name)));
2780 __ push(Immediate(name->Hash()));
2781 __ CallStub(&stub);
2782 __ test(r0, r0);
2783 __ j(not_zero, miss);
2784 __ jmp(done);
2785}
2786
2787
2788// Probe the name dictionary in the |elements| register. Jump to the
2789// |done| label if a property with the given name is found, leaving the
2790// index into the dictionary in |r0|. Jump to the |miss| label
2791// otherwise.
2792void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
2793 Label* miss,
2794 Label* done,
2795 Register elements,
2796 Register name,
2797 Register r0,
2798 Register r1) {
2799 DCHECK(!elements.is(r0));
2800 DCHECK(!elements.is(r1));
2801 DCHECK(!name.is(r0));
2802 DCHECK(!name.is(r1));
2803
2804 __ AssertName(name);
2805
2806 __ mov(r1, FieldOperand(elements, kCapacityOffset));
2807 __ shr(r1, kSmiTagSize); // convert smi to int
2808 __ dec(r1);
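  // The capacity is a power of two ("Capacity is smi 2^n" above), so
  // capacity - 1 is an all-ones bit mask; e.g. a capacity of 8 yields the
  // mask 0b111, which keeps every probe index inside the table.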
2809
2810 // Generate an unrolled loop that performs a few probes before
2811 // giving up. Measurements done on Gmail indicate that 2 probes
2812 // cover ~93% of loads from dictionaries.
2813 for (int i = 0; i < kInlinedProbes; i++) {
2814 // Compute the masked index: (hash + i + i * i) & mask.
2815 __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
2816 __ shr(r0, Name::kHashShift);
2817 if (i > 0) {
2818 __ add(r0, Immediate(NameDictionary::GetProbeOffset(i)));
2819 }
2820 __ and_(r0, r1);
2821
2822 // Scale the index by multiplying by the entry size.
2823 STATIC_ASSERT(NameDictionary::kEntrySize == 3);
2824 __ lea(r0, Operand(r0, r0, times_2, 0)); // r0 = r0 * 3
2825
2826 // Check if the key is identical to the name.
2827 __ cmp(name, Operand(elements,
2828 r0,
2829 times_4,
2830 kElementsStartOffset - kHeapObjectTag));
2831 __ j(equal, done);
2832 }
2833
2834 NameDictionaryLookupStub stub(masm->isolate(), elements, r1, r0,
2835 POSITIVE_LOOKUP);
2836 __ push(name);
2837 __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
2838 __ shr(r0, Name::kHashShift);
2839 __ push(r0);
2840 __ CallStub(&stub);
2841
2842 __ test(r1, r1);
2843 __ j(zero, miss);
2844 __ jmp(done);
2845}
2846
2847
2848void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
2849 // This stub overrides SometimesSetsUpAFrame() to return false. That means
2850 // we cannot call anything that could cause a GC from this stub.
2851 // Stack frame on entry:
2852 // esp[0 * kPointerSize]: return address.
2853 // esp[1 * kPointerSize]: key's hash.
2854 // esp[2 * kPointerSize]: key.
2855 // Registers:
2856 // dictionary_: NameDictionary to probe.
2857 // result_: used as scratch.
2858 // index_: will hold an index of entry if lookup is successful.
2859 // might alias with result_.
2860 // Returns:
2861 // result_ is zero if lookup failed, non zero otherwise.
2862
2863 Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
2864
2865 Register scratch = result();
2866
2867 __ mov(scratch, FieldOperand(dictionary(), kCapacityOffset));
2868 __ dec(scratch);
2869 __ SmiUntag(scratch);
2870 __ push(scratch);
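  // After this push the stack is: esp[0] = mask (capacity - 1, untagged),
  // esp[4] = return address, esp[8] = key's hash, esp[12] = key; the probe
  // loop below reads the hash and the key through these adjusted offsets.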
2871
2872 // If names of slots in range from 1 to kProbes - 1 for the hash value are
2873 // not equal to the name and kProbes-th slot is not used (its name is the
2874 // undefined value), it guarantees the hash table doesn't contain the
2875 // property. It's true even if some slots represent deleted properties
2876 // (their names are the hole value).
2877 for (int i = kInlinedProbes; i < kTotalProbes; i++) {
2878 // Compute the masked index: (hash + i + i * i) & mask.
2879 __ mov(scratch, Operand(esp, 2 * kPointerSize));
2880 if (i > 0) {
2881 __ add(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
2882 }
2883 __ and_(scratch, Operand(esp, 0));
2884
2885 // Scale the index by multiplying by the entry size.
2886 STATIC_ASSERT(NameDictionary::kEntrySize == 3);
2887 __ lea(index(), Operand(scratch, scratch, times_2, 0)); // index *= 3.
2888
2889 // Having undefined at this place means the name is not contained.
2890 STATIC_ASSERT(kSmiTagSize == 1);
2891 __ mov(scratch, Operand(dictionary(), index(), times_pointer_size,
2892 kElementsStartOffset - kHeapObjectTag));
2893 __ cmp(scratch, isolate()->factory()->undefined_value());
2894 __ j(equal, &not_in_dictionary);
2895
2896 // Stop if we found the property.
2897 __ cmp(scratch, Operand(esp, 3 * kPointerSize));
2898 __ j(equal, &in_dictionary);
2899
2900 if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
2901 // If we hit a key that is not a unique name during negative
2902 // lookup we have to bailout as this key might be equal to the
2903 // key we are looking for.
2904
2905 // Check if the entry name is not a unique name.
2906 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
2907 __ JumpIfNotUniqueNameInstanceType(
2908 FieldOperand(scratch, Map::kInstanceTypeOffset),
2909 &maybe_in_dictionary);
2910 }
2911 }
2912
2913 __ bind(&maybe_in_dictionary);
2914 // If we are doing negative lookup then probing failure should be
2915 // treated as a lookup success. For positive lookup probing failure
2916 // should be treated as lookup failure.
2917 if (mode() == POSITIVE_LOOKUP) {
2918 __ mov(result(), Immediate(0));
2919 __ Drop(1);
2920 __ ret(2 * kPointerSize);
2921 }
2922
2923 __ bind(&in_dictionary);
2924 __ mov(result(), Immediate(1));
2925 __ Drop(1);
2926 __ ret(2 * kPointerSize);
2927
2928 __ bind(&not_in_dictionary);
2929 __ mov(result(), Immediate(0));
2930 __ Drop(1);
2931 __ ret(2 * kPointerSize);
2932}
2933
2934
2935void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
2936 Isolate* isolate) {
2937 StoreBufferOverflowStub stub(isolate, kDontSaveFPRegs);
2938 stub.GetCode();
2939 StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
2940 stub2.GetCode();
2941}
2942
2943
2944// Takes the input in 3 registers: address_, value_ and object_. A pointer to
2945// the value has just been written into the object, now this stub makes sure
2946// we keep the GC informed. The word in the object where the value has been
2947// written is in the address register.
2948void RecordWriteStub::Generate(MacroAssembler* masm) {
2949 Label skip_to_incremental_noncompacting;
2950 Label skip_to_incremental_compacting;
2951
2952 // The first two instructions are generated with labels so as to get the
2953 // offset fixed up correctly by the bind(Label*) call. We patch it back and
2954 // forth between a compare instructions (a nop in this position) and the
2955 // real branch when we start and stop incremental heap marking.
2956 __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
2957 __ jmp(&skip_to_incremental_compacting, Label::kFar);
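  // While incremental marking is off, both jumps above are patched into the
  // two-byte and five-byte nops installed at the bottom of this function, so
  // execution falls straight through to the store-buffer-only path below.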
2958
2959 if (remembered_set_action() == EMIT_REMEMBERED_SET) {
2960 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
2961 MacroAssembler::kReturnAtEnd);
2962 } else {
2963 __ ret(0);
2964 }
2965
2966 __ bind(&skip_to_incremental_noncompacting);
2967 GenerateIncremental(masm, INCREMENTAL);
2968
2969 __ bind(&skip_to_incremental_compacting);
2970 GenerateIncremental(masm, INCREMENTAL_COMPACTION);
2971
2972 // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
2973 // Will be checked in IncrementalMarking::ActivateGeneratedStub.
2974 masm->set_byte_at(0, kTwoByteNopInstruction);
2975 masm->set_byte_at(2, kFiveByteNopInstruction);
2976}
2977
2978
2979void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
2980 regs_.Save(masm);
2981
2982 if (remembered_set_action() == EMIT_REMEMBERED_SET) {
2983 Label dont_need_remembered_set;
2984
2985 __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
2986 __ JumpIfNotInNewSpace(regs_.scratch0(), // Value.
2987 regs_.scratch0(),
2988 &dont_need_remembered_set);
2989
2990 __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(),
2991 &dont_need_remembered_set);
2992
2993 // First notify the incremental marker if necessary, then update the
2994 // remembered set.
2995 CheckNeedsToInformIncrementalMarker(
2996 masm,
2997 kUpdateRememberedSetOnNoNeedToInformIncrementalMarker,
2998 mode);
2999 InformIncrementalMarker(masm);
3000 regs_.Restore(masm);
3001 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
3002 MacroAssembler::kReturnAtEnd);
3003
3004 __ bind(&dont_need_remembered_set);
3005 }
3006
3007 CheckNeedsToInformIncrementalMarker(
3008 masm,
3009 kReturnOnNoNeedToInformIncrementalMarker,
3010 mode);
3011 InformIncrementalMarker(masm);
3012 regs_.Restore(masm);
3013 __ ret(0);
3014}
3015
3016
3017void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
3018 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
3019 int argument_count = 3;
3020 __ PrepareCallCFunction(argument_count, regs_.scratch0());
3021 __ mov(Operand(esp, 0 * kPointerSize), regs_.object());
3022 __ mov(Operand(esp, 1 * kPointerSize), regs_.address()); // Slot.
3023 __ mov(Operand(esp, 2 * kPointerSize),
3024 Immediate(ExternalReference::isolate_address(isolate())));
3025
3026 AllowExternalCallThatCantCauseGC scope(masm);
3027 __ CallCFunction(
3028 ExternalReference::incremental_marking_record_write_function(isolate()),
3029 argument_count);
3030
3031 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
3032}
3033
3034
3035void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
3036 MacroAssembler* masm,
3037 OnNoNeedToInformIncrementalMarker on_no_need,
3038 Mode mode) {
3039 Label object_is_black, need_incremental, need_incremental_pop_object;
3040
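  // Every run of this stub decrements the page's write barrier counter; once
  // the counter goes negative, the cheap color checks below are skipped and
  // the stub proceeds directly to informing the incremental marker.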
3041 __ mov(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
3042 __ and_(regs_.scratch0(), regs_.object());
3043 __ mov(regs_.scratch1(),
3044 Operand(regs_.scratch0(),
3045 MemoryChunk::kWriteBarrierCounterOffset));
3046 __ sub(regs_.scratch1(), Immediate(1));
3047 __ mov(Operand(regs_.scratch0(),
3048 MemoryChunk::kWriteBarrierCounterOffset),
3049 regs_.scratch1());
3050 __ j(negative, &need_incremental);
3051
3052 // Let's look at the color of the object: If it is not black we don't have
3053 // to inform the incremental marker.
3054 __ JumpIfBlack(regs_.object(),
3055 regs_.scratch0(),
3056 regs_.scratch1(),
3057 &object_is_black,
3058 Label::kNear);
3059
3060 regs_.Restore(masm);
3061 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
3062 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
3063 MacroAssembler::kReturnAtEnd);
3064 } else {
3065 __ ret(0);
3066 }
3067
3068 __ bind(&object_is_black);
3069
3070 // Get the value from the slot.
3071 __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
3072
3073 if (mode == INCREMENTAL_COMPACTION) {
3074 Label ensure_not_white;
3075
3076 __ CheckPageFlag(regs_.scratch0(), // Contains value.
3077 regs_.scratch1(), // Scratch.
3078 MemoryChunk::kEvacuationCandidateMask,
3079 zero,
3080 &ensure_not_white,
3081 Label::kNear);
3082
3083 __ CheckPageFlag(regs_.object(),
3084 regs_.scratch1(), // Scratch.
3085 MemoryChunk::kSkipEvacuationSlotsRecordingMask,
3086 not_zero,
3087 &ensure_not_white,
3088 Label::kNear);
3089
3090 __ jmp(&need_incremental);
3091
3092 __ bind(&ensure_not_white);
3093 }
3094
3095 // We need an extra register for this, so we push the object register
3096 // temporarily.
3097 __ push(regs_.object());
3098 __ JumpIfWhite(regs_.scratch0(), // The value.
3099 regs_.scratch1(), // Scratch.
3100 regs_.object(), // Scratch.
3101 &need_incremental_pop_object, Label::kNear);
3102 __ pop(regs_.object());
3103
3104 regs_.Restore(masm);
3105 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
3106 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
3107 MacroAssembler::kReturnAtEnd);
3108 } else {
3109 __ ret(0);
3110 }
3111
3112 __ bind(&need_incremental_pop_object);
3113 __ pop(regs_.object());
3114
3115 __ bind(&need_incremental);
3116
3117 // Fall through when we need to inform the incremental marker.
3118}
3119
3120
3121void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
3122 CEntryStub ces(isolate(), 1, kSaveFPRegs);
3123 __ call(ces.GetCode(), RelocInfo::CODE_TARGET);
3124 int parameter_count_offset =
3125 StubFailureTrampolineFrameConstants::kArgumentsLengthOffset;
3126 __ mov(ebx, MemOperand(ebp, parameter_count_offset));
3127 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
3128 __ pop(ecx);
3129 int additional_offset =
3130 function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0;
3131 __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset));
3132 __ jmp(ecx); // Return to IC Miss stub, continuation still on stack.
3133}
3134
3135
3136void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
3137 __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
3138 LoadICStub stub(isolate());
3139 stub.GenerateForTrampoline(masm);
3140}
3141
3142
3143void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
3144 __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
3145 KeyedLoadICStub stub(isolate());
3146 stub.GenerateForTrampoline(masm);
3147}
3148
3149
3150static void HandleArrayCases(MacroAssembler* masm, Register receiver,
3151 Register key, Register vector, Register slot,
3152 Register feedback, bool is_polymorphic,
3153 Label* miss) {
3154 // feedback initially contains the feedback array
3155 Label next, next_loop, prepare_next;
3156 Label load_smi_map, compare_map;
3157 Label start_polymorphic;
3158
3159 __ push(receiver);
3160 __ push(vector);
3161
3162 Register receiver_map = receiver;
3163 Register cached_map = vector;
3164
3165 // Receiver might not be a heap object.
3166 __ JumpIfSmi(receiver, &load_smi_map);
3167 __ mov(receiver_map, FieldOperand(receiver, 0));
3168 __ bind(&compare_map);
3169 __ mov(cached_map, FieldOperand(feedback, FixedArray::OffsetOfElementAt(0)));
3170
3171 // A named keyed load might have a two-element array; all other cases can
3172 // count on an array with at least two {map, handler} pairs, so they can go
3173 // right into polymorphic array handling.
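  // The feedback array holds consecutive {WeakCell(map), handler} pairs:
  // element 0 is the first cached map and element 1 its handler, and the
  // polymorphic loop below walks any remaining pairs two elements at a time.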
3174 __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
3175 __ j(not_equal, is_polymorphic ? &start_polymorphic : &next);
3176
3177 // found, now call handler.
3178 Register handler = feedback;
3179 __ mov(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1)));
3180 __ pop(vector);
3181 __ pop(receiver);
3182 __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
3183 __ jmp(handler);
3184
3185 if (!is_polymorphic) {
3186 __ bind(&next);
3187 __ cmp(FieldOperand(feedback, FixedArray::kLengthOffset),
3188 Immediate(Smi::FromInt(2)));
3189 __ j(not_equal, &start_polymorphic);
3190 __ pop(vector);
3191 __ pop(receiver);
3192 __ jmp(miss);
3193 }
3194
3195 // Polymorphic, we have to loop from 2 to N
3196 __ bind(&start_polymorphic);
3197 __ push(key);
3198 Register counter = key;
3199 __ mov(counter, Immediate(Smi::FromInt(2)));
3200 __ bind(&next_loop);
3201 __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
3202 FixedArray::kHeaderSize));
3203 __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
3204 __ j(not_equal, &prepare_next);
3205 __ mov(handler, FieldOperand(feedback, counter, times_half_pointer_size,
3206 FixedArray::kHeaderSize + kPointerSize));
3207 __ pop(key);
3208 __ pop(vector);
3209 __ pop(receiver);
3210 __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
3211 __ jmp(handler);
3212
3213 __ bind(&prepare_next);
3214 __ add(counter, Immediate(Smi::FromInt(2)));
3215 __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
3216 __ j(less, &next_loop);
3217
3218 // We exhausted our array of map handler pairs.
3219 __ pop(key);
3220 __ pop(vector);
3221 __ pop(receiver);
3222 __ jmp(miss);
3223
3224 __ bind(&load_smi_map);
3225 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
3226 __ jmp(&compare_map);
3227}
3228
3229
3230static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver,
3231 Register key, Register vector, Register slot,
3232 Register weak_cell, Label* miss) {
3233 // weak_cell initially contains the weak cell loaded from the feedback vector
3234 Label compare_smi_map;
3235
3236 // Move the weak map into the weak_cell register.
3237 Register ic_map = weak_cell;
3238 __ mov(ic_map, FieldOperand(weak_cell, WeakCell::kValueOffset));
3239
3240 // Receiver might not be a heap object.
3241 __ JumpIfSmi(receiver, &compare_smi_map);
3242 __ cmp(ic_map, FieldOperand(receiver, 0));
3243 __ j(not_equal, miss);
3244 Register handler = weak_cell;
3245 __ mov(handler, FieldOperand(vector, slot, times_half_pointer_size,
3246 FixedArray::kHeaderSize + kPointerSize));
3247 __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
3248 __ jmp(handler);
3249
3250 // In microbenchmarks, it made sense to unroll this code so that the call to
3251 // the handler is duplicated for a HeapObject receiver and a Smi receiver.
3252 __ bind(&compare_smi_map);
3253 __ CompareRoot(ic_map, Heap::kHeapNumberMapRootIndex);
3254 __ j(not_equal, miss);
3255 __ mov(handler, FieldOperand(vector, slot, times_half_pointer_size,
3256 FixedArray::kHeaderSize + kPointerSize));
3257 __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
3258 __ jmp(handler);
3259}
3260
3261
3262void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }
3263
3264
3265void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
3266 GenerateImpl(masm, true);
3267}
3268
3269
3270void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
3271 Register receiver = LoadWithVectorDescriptor::ReceiverRegister(); // edx
3272 Register name = LoadWithVectorDescriptor::NameRegister(); // ecx
3273 Register vector = LoadWithVectorDescriptor::VectorRegister(); // ebx
3274 Register slot = LoadWithVectorDescriptor::SlotRegister(); // eax
3275 Register scratch = edi;
3276 __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
3277 FixedArray::kHeaderSize));
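  // The slot register holds a Smi index, so scaling it by
  // times_half_pointer_size produces a byte offset of slot * kPointerSize
  // into the feedback vector.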
3278
3279 // Is it a weak cell?
3280 Label try_array;
3281 Label not_array, smi_key, key_okay, miss;
3282 __ CompareRoot(FieldOperand(scratch, 0), Heap::kWeakCellMapRootIndex);
3283 __ j(not_equal, &try_array);
3284 HandleMonomorphicCase(masm, receiver, name, vector, slot, scratch, &miss);
3285
3286 // Is it a fixed array?
3287 __ bind(&try_array);
3288 __ CompareRoot(FieldOperand(scratch, 0), Heap::kFixedArrayMapRootIndex);
3289 __ j(not_equal, &not_array);
3290 HandleArrayCases(masm, receiver, name, vector, slot, scratch, true, &miss);
3291
3292 __ bind(&not_array);
3293 __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex);
3294 __ j(not_equal, &miss);
3295 __ push(slot);
3296 __ push(vector);
3297 Code::Flags code_flags =
3298 Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::LOAD_IC));
3299 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::LOAD_IC, code_flags,
3300 receiver, name, vector, scratch);
3301 __ pop(vector);
3302 __ pop(slot);
3303
3304 __ bind(&miss);
3305 LoadIC::GenerateMiss(masm);
3306}
3307
3308
3309void KeyedLoadICStub::Generate(MacroAssembler* masm) {
3310 GenerateImpl(masm, false);
3311}
3312
3313
3314void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
3315 GenerateImpl(masm, true);
3316}
3317
3318
3319void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
3320 Register receiver = LoadWithVectorDescriptor::ReceiverRegister(); // edx
3321 Register key = LoadWithVectorDescriptor::NameRegister(); // ecx
3322 Register vector = LoadWithVectorDescriptor::VectorRegister(); // ebx
3323 Register slot = LoadWithVectorDescriptor::SlotRegister(); // eax
3324 Register feedback = edi;
3325 __ mov(feedback, FieldOperand(vector, slot, times_half_pointer_size,
3326 FixedArray::kHeaderSize));
3327 // Is it a weak cell?
3328 Label try_array;
3329 Label not_array, smi_key, key_okay, miss;
3330 __ CompareRoot(FieldOperand(feedback, 0), Heap::kWeakCellMapRootIndex);
3331 __ j(not_equal, &try_array);
3332 HandleMonomorphicCase(masm, receiver, key, vector, slot, feedback, &miss);
3333
3334 __ bind(&try_array);
3335 // Is it a fixed array?
3336 __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
3337 __ j(not_equal, &not_array);
3338
3339 // We have a polymorphic element handler.
3340 Label polymorphic, try_poly_name;
3341 __ bind(&polymorphic);
3342 HandleArrayCases(masm, receiver, key, vector, slot, feedback, true, &miss);
3343
3344 __ bind(&not_array);
3345 // Is it generic?
3346 __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
3347 __ j(not_equal, &try_poly_name);
3348 Handle<Code> megamorphic_stub =
3349 KeyedLoadIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
3350 __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);
3351
3352 __ bind(&try_poly_name);
3353 // We might have a name in feedback, and a fixed array in the next slot.
3354 __ cmp(key, feedback);
3355 __ j(not_equal, &miss);
3356 // If the name comparison succeeded, we know we have a fixed array with
3357 // at least one map/handler pair.
3358 __ mov(feedback, FieldOperand(vector, slot, times_half_pointer_size,
3359 FixedArray::kHeaderSize + kPointerSize));
3360 HandleArrayCases(masm, receiver, key, vector, slot, feedback, false, &miss);
3361
3362 __ bind(&miss);
3363 KeyedLoadIC::GenerateMiss(masm);
3364}
3365
3366
3367void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) {
3368 __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
3369 VectorStoreICStub stub(isolate(), state());
3370 stub.GenerateForTrampoline(masm);
3371}
3372
3373
3374void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
3375 __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
3376 VectorKeyedStoreICStub stub(isolate(), state());
3377 stub.GenerateForTrampoline(masm);
3378}
3379
3380
3381void VectorStoreICStub::Generate(MacroAssembler* masm) {
3382 GenerateImpl(masm, false);
3383}
3384
3385
3386void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
3387 GenerateImpl(masm, true);
3388}
3389
3390
3391// value is on the stack already.
3392static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register receiver,
3393 Register key, Register vector,
3394 Register slot, Register feedback,
3395 bool is_polymorphic, Label* miss) {
3396 // feedback initially contains the feedback array
3397 Label next, next_loop, prepare_next;
3398 Label load_smi_map, compare_map;
3399 Label start_polymorphic;
3400 Label pop_and_miss;
3401 ExternalReference virtual_register =
3402 ExternalReference::virtual_handler_register(masm->isolate());
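  // The handler is computed into the value register (eax), but "value" itself
  // was pushed on entry and has to be restored before dispatching. The handler
  // address is therefore parked in this isolate-level "virtual register" and
  // the final jump goes through that memory slot rather than a machine
  // register.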
3403
3404 __ push(receiver);
3405 __ push(vector);
3406
3407 Register receiver_map = receiver;
3408 Register cached_map = vector;
3409
3410 // Receiver might not be a heap object.
3411 __ JumpIfSmi(receiver, &load_smi_map);
3412 __ mov(receiver_map, FieldOperand(receiver, 0));
3413 __ bind(&compare_map);
3414 __ mov(cached_map, FieldOperand(feedback, FixedArray::OffsetOfElementAt(0)));
3415
3416 // A named keyed store might have a two-element array; all other cases can
3417 // count on an array with at least two {map, handler} pairs, so they can go
3418 // right into polymorphic array handling.
3419 __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
3420 __ j(not_equal, &start_polymorphic);
3421
3422 // found, now call handler.
3423 Register handler = feedback;
3424 DCHECK(handler.is(VectorStoreICDescriptor::ValueRegister()));
3425 __ mov(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1)));
3426 __ pop(vector);
3427 __ pop(receiver);
3428 __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
3429 __ mov(Operand::StaticVariable(virtual_register), handler);
3430 __ pop(handler); // Pop "value".
3431 __ jmp(Operand::StaticVariable(virtual_register));
3432
3433 // Polymorphic, we have to loop from 2 to N
3434 __ bind(&start_polymorphic);
3435 __ push(key);
3436 Register counter = key;
3437 __ mov(counter, Immediate(Smi::FromInt(2)));
3438
3439 if (!is_polymorphic) {
3440 // If is_polymorphic is false, we may only have a two element array.
3441 // Check against length now in that case.
3442 __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
3443 __ j(greater_equal, &pop_and_miss);
3444 }
3445
3446 __ bind(&next_loop);
3447 __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
3448 FixedArray::kHeaderSize));
3449 __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
3450 __ j(not_equal, &prepare_next);
3451 __ mov(handler, FieldOperand(feedback, counter, times_half_pointer_size,
3452 FixedArray::kHeaderSize + kPointerSize));
3453 __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
3454 __ pop(key);
3455 __ pop(vector);
3456 __ pop(receiver);
3457 __ mov(Operand::StaticVariable(virtual_register), handler);
3458 __ pop(handler); // Pop "value".
3459 __ jmp(Operand::StaticVariable(virtual_register));
3460
3461 __ bind(&prepare_next);
3462 __ add(counter, Immediate(Smi::FromInt(2)));
3463 __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
3464 __ j(less, &next_loop);
3465
3466 // We exhausted our array of map handler pairs.
3467 __ bind(&pop_and_miss);
3468 __ pop(key);
3469 __ pop(vector);
3470 __ pop(receiver);
3471 __ jmp(miss);
3472
3473 __ bind(&load_smi_map);
3474 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
3475 __ jmp(&compare_map);
3476}
3477
3478
3479static void HandleMonomorphicStoreCase(MacroAssembler* masm, Register receiver,
3480 Register key, Register vector,
3481 Register slot, Register weak_cell,
3482 Label* miss) {
3483 // The store ic value is on the stack.
3484 DCHECK(weak_cell.is(VectorStoreICDescriptor::ValueRegister()));
3485 ExternalReference virtual_register =
3486 ExternalReference::virtual_handler_register(masm->isolate());
3487
3488 // weak_cell initially contains the weak cell loaded from the feedback vector
3489 Label compare_smi_map;
3490
3491 // Move the weak map into the weak_cell register.
3492 Register ic_map = weak_cell;
3493 __ mov(ic_map, FieldOperand(weak_cell, WeakCell::kValueOffset));
3494
3495 // Receiver might not be a heap object.
3496 __ JumpIfSmi(receiver, &compare_smi_map);
3497 __ cmp(ic_map, FieldOperand(receiver, 0));
3498 __ j(not_equal, miss);
3499 __ mov(weak_cell, FieldOperand(vector, slot, times_half_pointer_size,
3500 FixedArray::kHeaderSize + kPointerSize));
3501 __ lea(weak_cell, FieldOperand(weak_cell, Code::kHeaderSize));
3502 // Put the store ic value back in its register.
3503 __ mov(Operand::StaticVariable(virtual_register), weak_cell);
3504 __ pop(weak_cell); // Pop "value".
3505 // jump to the handler.
3506 __ jmp(Operand::StaticVariable(virtual_register));
3507
3508 // In microbenchmarks, it made sense to unroll this code so that the call to
3509 // the handler is duplicated for a HeapObject receiver and a Smi receiver.
3510 __ bind(&compare_smi_map);
3511 __ CompareRoot(ic_map, Heap::kHeapNumberMapRootIndex);
3512 __ j(not_equal, miss);
3513 __ mov(weak_cell, FieldOperand(vector, slot, times_half_pointer_size,
3514 FixedArray::kHeaderSize + kPointerSize));
3515 __ lea(weak_cell, FieldOperand(weak_cell, Code::kHeaderSize));
3516 __ mov(Operand::StaticVariable(virtual_register), weak_cell);
3517 __ pop(weak_cell); // Pop "value".
3518 // jump to the handler.
3519 __ jmp(Operand::StaticVariable(virtual_register));
3520}
3521
3522
3523void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
3524 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // edx
3525 Register key = VectorStoreICDescriptor::NameRegister(); // ecx
3526 Register value = VectorStoreICDescriptor::ValueRegister(); // eax
3527 Register vector = VectorStoreICDescriptor::VectorRegister(); // ebx
3528 Register slot = VectorStoreICDescriptor::SlotRegister(); // edi
3529 Label miss;
3530
3531 __ push(value);
3532
3533 Register scratch = value;
3534 __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
3535 FixedArray::kHeaderSize));
3536
3537 // Is it a weak cell?
3538 Label try_array;
3539 Label not_array, smi_key, key_okay;
3540 __ CompareRoot(FieldOperand(scratch, 0), Heap::kWeakCellMapRootIndex);
3541 __ j(not_equal, &try_array);
3542 HandleMonomorphicStoreCase(masm, receiver, key, vector, slot, scratch, &miss);
3543
3544 // Is it a fixed array?
3545 __ bind(&try_array);
3546 __ CompareRoot(FieldOperand(scratch, 0), Heap::kFixedArrayMapRootIndex);
3547 __ j(not_equal, &not_array);
3548 HandlePolymorphicStoreCase(masm, receiver, key, vector, slot, scratch, true,
3549 &miss);
3550
3551 __ bind(&not_array);
3552 __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex);
3553 __ j(not_equal, &miss);
3554
3555 __ pop(value);
3556 __ push(slot);
3557 __ push(vector);
3558 Code::Flags code_flags =
3559 Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::STORE_IC));
3560 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::STORE_IC, code_flags,
3561 receiver, key, slot, no_reg);
3562 __ pop(vector);
3563 __ pop(slot);
3564 Label no_pop_miss;
3565 __ jmp(&no_pop_miss);
3566
3567 __ bind(&miss);
3568 __ pop(value);
3569 __ bind(&no_pop_miss);
3570 StoreIC::GenerateMiss(masm);
3571}
3572
3573
3574void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) {
3575 GenerateImpl(masm, false);
3576}
3577
3578
3579void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
3580 GenerateImpl(masm, true);
3581}
3582
3583
3584static void HandlePolymorphicKeyedStoreCase(MacroAssembler* masm,
3585 Register receiver, Register key,
3586 Register vector, Register slot,
3587 Register feedback, Label* miss) {
3588 // feedback initially contains the feedback array
3589 Label next, next_loop, prepare_next;
3590 Label load_smi_map, compare_map;
3591 Label transition_call;
3592 Label pop_and_miss;
3593 ExternalReference virtual_register =
3594 ExternalReference::virtual_handler_register(masm->isolate());
3595 ExternalReference virtual_slot =
3596 ExternalReference::virtual_slot_register(masm->isolate());
3597
3598 __ push(receiver);
3599 __ push(vector);
3600
3601 Register receiver_map = receiver;
3602 Register cached_map = vector;
3603 Register value = StoreDescriptor::ValueRegister();
3604
3605 // Receiver might not be a heap object.
3606 __ JumpIfSmi(receiver, &load_smi_map);
3607 __ mov(receiver_map, FieldOperand(receiver, 0));
3608 __ bind(&compare_map);
3609
3610 // Polymorphic, we have to loop from 0 to N - 1
3611 __ push(key);
3612 // Current stack layout:
3613 // - esp[0] -- key
3614 // - esp[4] -- vector
3615 // - esp[8] -- receiver
3616 // - esp[12] -- value
3617 // - esp[16] -- return address
3618 //
3619 // Required stack layout for handler call:
3620 // - esp[0] -- return address
3621 // - receiver, key, value, vector, slot in registers.
3622 // - handler in virtual register.
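  // Each entry of this feedback array is a {receiver map, transition map (or
  // undefined), handler} triple, which is why the handler is read at offset
  // 2 * kPointerSize within an entry and the counter advances by 3 below.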
3623 Register counter = key;
3624 __ mov(counter, Immediate(Smi::FromInt(0)));
3625 __ bind(&next_loop);
3626 __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
3627 FixedArray::kHeaderSize));
3628 __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
3629 __ j(not_equal, &prepare_next);
3630 __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
3631 FixedArray::kHeaderSize + kPointerSize));
3632 __ CompareRoot(cached_map, Heap::kUndefinedValueRootIndex);
3633 __ j(not_equal, &transition_call);
3634 __ mov(feedback, FieldOperand(feedback, counter, times_half_pointer_size,
3635 FixedArray::kHeaderSize + 2 * kPointerSize));
3636 __ pop(key);
3637 __ pop(vector);
3638 __ pop(receiver);
3639 __ lea(feedback, FieldOperand(feedback, Code::kHeaderSize));
3640 __ mov(Operand::StaticVariable(virtual_register), feedback);
3641 __ pop(value);
3642 __ jmp(Operand::StaticVariable(virtual_register));
3643
3644 __ bind(&transition_call);
3645 // Current stack layout:
3646 // - esp[0] -- key
3647 // - esp[4] -- vector
3648 // - esp[8] -- receiver
3649 // - esp[12] -- value
3650 // - esp[16] -- return address
3651 //
3652 // Required stack layout for handler call:
3653 // - esp[0] -- return address
3654 // - receiver, key, value, map, vector in registers.
3655 // - handler and slot in virtual registers.
3656 __ mov(Operand::StaticVariable(virtual_slot), slot);
3657 __ mov(feedback, FieldOperand(feedback, counter, times_half_pointer_size,
3658 FixedArray::kHeaderSize + 2 * kPointerSize));
3659 __ lea(feedback, FieldOperand(feedback, Code::kHeaderSize));
3660 __ mov(Operand::StaticVariable(virtual_register), feedback);
3661
3662 __ mov(cached_map, FieldOperand(cached_map, WeakCell::kValueOffset));
3663 // The weak cell may have been cleared.
3664 __ JumpIfSmi(cached_map, &pop_and_miss);
3665 DCHECK(!cached_map.is(VectorStoreTransitionDescriptor::MapRegister()));
3666 __ mov(VectorStoreTransitionDescriptor::MapRegister(), cached_map);
3667
3668 // Pop key into place.
3669 __ pop(key);
3670 __ pop(vector);
3671 __ pop(receiver);
3672 __ pop(value);
3673 __ jmp(Operand::StaticVariable(virtual_register));
3674
3675 __ bind(&prepare_next);
3676 __ add(counter, Immediate(Smi::FromInt(3)));
3677 __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
3678 __ j(less, &next_loop);
3679
3680 // We exhausted our array of map handler pairs.
3681 __ bind(&pop_and_miss);
3682 __ pop(key);
3683 __ pop(vector);
3684 __ pop(receiver);
3685 __ jmp(miss);
3686
3687 __ bind(&load_smi_map);
3688 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
3689 __ jmp(&compare_map);
3690}
3691
3692void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
3693 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // edx
3694 Register key = VectorStoreICDescriptor::NameRegister(); // ecx
3695 Register value = VectorStoreICDescriptor::ValueRegister(); // eax
3696 Register vector = VectorStoreICDescriptor::VectorRegister(); // ebx
3697 Register slot = VectorStoreICDescriptor::SlotRegister(); // edi
3698 Label miss;
3699
3700 __ push(value);
3701
3702 Register scratch = value;
3703 __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
3704 FixedArray::kHeaderSize));
3705
3706 // Is it a weak cell?
3707 Label try_array;
3708 Label not_array, smi_key, key_okay;
3709 __ CompareRoot(FieldOperand(scratch, 0), Heap::kWeakCellMapRootIndex);
3710 __ j(not_equal, &try_array);
3711 HandleMonomorphicStoreCase(masm, receiver, key, vector, slot, scratch, &miss);
3712
3713 // Is it a fixed array?
3714 __ bind(&try_array);
3715 __ CompareRoot(FieldOperand(scratch, 0), Heap::kFixedArrayMapRootIndex);
3716 __ j(not_equal, &not_array);
3717 HandlePolymorphicKeyedStoreCase(masm, receiver, key, vector, slot, scratch,
3718 &miss);
3719
3720 __ bind(&not_array);
3721 Label try_poly_name;
3722 __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex);
3723 __ j(not_equal, &try_poly_name);
3724
3725 __ pop(value);
3726
3727 Handle<Code> megamorphic_stub =
3728 KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
3729 __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);
3730
3731 __ bind(&try_poly_name);
3732 // We might have a name in feedback, and a fixed array in the next slot.
3733 __ cmp(key, scratch);
3734 __ j(not_equal, &miss);
3735 // If the name comparison succeeded, we know we have a fixed array with
3736 // at least one map/handler pair.
3737 __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
3738 FixedArray::kHeaderSize + kPointerSize));
3739 HandlePolymorphicStoreCase(masm, receiver, key, vector, slot, scratch, false,
3740 &miss);
3741
3742 __ bind(&miss);
3743 __ pop(value);
3744 KeyedStoreIC::GenerateMiss(masm);
3745}
3746
3747void CallICTrampolineStub::Generate(MacroAssembler* masm) {
3748 __ EmitLoadTypeFeedbackVector(ebx);
3749 CallICStub stub(isolate(), state());
3750 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
3751}
3752
3753void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
3754 if (masm->isolate()->function_entry_hook() != NULL) {
3755 ProfileEntryHookStub stub(masm->isolate());
3756 masm->CallStub(&stub);
3757 }
3758}
3759
3760void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
3761 // Save volatile registers.
3762 const int kNumSavedRegisters = 3;
3763 __ push(eax);
3764 __ push(ecx);
3765 __ push(edx);
3766
3767 // Calculate and push the original stack pointer.
3768 __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
3769 __ push(eax);
3770
3771 // Retrieve our return address and use it to calculate the calling
3772 // function's address.
3773 __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
3774 __ sub(eax, Immediate(Assembler::kCallInstructionLength));
3775 __ push(eax);
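  // The return address points just past the call into this stub, so backing it
  // up by kCallInstructionLength yields the address of the call site in the
  // instrumented function; that address is passed to the entry hook along with
  // the original stack pointer pushed above.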
3776
3777 // Call the entry hook.
3778 DCHECK(isolate()->function_entry_hook() != NULL);
3779 __ call(FUNCTION_ADDR(isolate()->function_entry_hook()),
3780 RelocInfo::RUNTIME_ENTRY);
3781 __ add(esp, Immediate(2 * kPointerSize));
3782
3783 // Restore ecx.
3784 __ pop(edx);
3785 __ pop(ecx);
3786 __ pop(eax);
3787
3788 __ ret(0);
3789}
3790
3791template <class T>
3792static void CreateArrayDispatch(MacroAssembler* masm,
3793 AllocationSiteOverrideMode mode) {
3794 if (mode == DISABLE_ALLOCATION_SITES) {
3795 T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
3796 __ TailCallStub(&stub);
3797 } else if (mode == DONT_OVERRIDE) {
3798 int last_index =
3799 GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
3800 for (int i = 0; i <= last_index; ++i) {
3801 Label next;
3802 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
3803 __ cmp(edx, kind);
3804 __ j(not_equal, &next);
3805 T stub(masm->isolate(), kind);
3806 __ TailCallStub(&stub);
3807 __ bind(&next);
3808 }
3809
3810 // If we reached this point there is a problem.
3811 __ Abort(kUnexpectedElementsKindInArrayConstructor);
3812 } else {
3813 UNREACHABLE();
3814 }
3815}
3816
3817static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
3818 AllocationSiteOverrideMode mode) {
3819 // ebx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
3820 // edx - kind (if mode != DISABLE_ALLOCATION_SITES)
3821 // eax - number of arguments
3822 // edi - constructor?
3823 // esp[0] - return address
3824 // esp[4] - last argument
3825 Label normal_sequence;
3826 if (mode == DONT_OVERRIDE) {
3827 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
3828 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
3829 STATIC_ASSERT(FAST_ELEMENTS == 2);
3830 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
3831 STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4);
3832 STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
3833
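  // The STATIC_ASSERTs above pin the holey kinds to the odd values, so
  // testing the low bit of the kind is sufficient to detect holeyness.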
3834 // is the low bit set? If so, we are holey and that is good.
3835 __ test_b(edx, Immediate(1));
3836 __ j(not_zero, &normal_sequence);
3837 }
3838
3839 // look at the first argument
3840 __ mov(ecx, Operand(esp, kPointerSize));
3841 __ test(ecx, ecx);
3842 __ j(zero, &normal_sequence);
3843
3844 if (mode == DISABLE_ALLOCATION_SITES) {
3845 ElementsKind initial = GetInitialFastElementsKind();
3846 ElementsKind holey_initial = GetHoleyElementsKind(initial);
3847
3848 ArraySingleArgumentConstructorStub stub_holey(
3849 masm->isolate(), holey_initial, DISABLE_ALLOCATION_SITES);
3850 __ TailCallStub(&stub_holey);
3851
3852 __ bind(&normal_sequence);
3853 ArraySingleArgumentConstructorStub stub(masm->isolate(), initial,
3854 DISABLE_ALLOCATION_SITES);
3855 __ TailCallStub(&stub);
3856 } else if (mode == DONT_OVERRIDE) {
3857 // We are going to create a holey array, but our kind is non-holey.
3858 // Fix kind and retry.
3859 __ inc(edx);
3860
3861 if (FLAG_debug_code) {
3862 Handle<Map> allocation_site_map =
3863 masm->isolate()->factory()->allocation_site_map();
3864 __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
3865 __ Assert(equal, kExpectedAllocationSite);
3866 }
3867
3868 // Save the resulting elements kind in type info. We can't just store the new
3869 // kind in the AllocationSite::transition_info field because the elements kind
3870 // is restricted to a portion of the field; the upper bits must be left alone.
3871 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
3872 __ add(FieldOperand(ebx, AllocationSite::kTransitionInfoOffset),
3873 Immediate(Smi::FromInt(kFastElementsKindPackedToHoley)));
3874
3875 __ bind(&normal_sequence);
3876 int last_index =
3877 GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
3878 for (int i = 0; i <= last_index; ++i) {
3879 Label next;
3880 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
3881 __ cmp(edx, kind);
3882 __ j(not_equal, &next);
3883 ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
3884 __ TailCallStub(&stub);
3885 __ bind(&next);
3886 }
3887
3888 // If we reached this point there is a problem.
3889 __ Abort(kUnexpectedElementsKindInArrayConstructor);
3890 } else {
3891 UNREACHABLE();
3892 }
3893}
3894
3895template <class T>
3896static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
3897 int to_index =
3898 GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
3899 for (int i = 0; i <= to_index; ++i) {
3900 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
3901 T stub(isolate, kind);
3902 stub.GetCode();
3903 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
3904 T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
3905 stub1.GetCode();
3906 }
3907 }
3908}
3909
3910void CommonArrayConstructorStub::GenerateStubsAheadOfTime(Isolate* isolate) {
3911 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
3912 isolate);
3913 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
3914 isolate);
3915 ArrayNArgumentsConstructorStub stub(isolate);
3916 stub.GetCode();
3917
3918 ElementsKind kinds[2] = {FAST_ELEMENTS, FAST_HOLEY_ELEMENTS};
3919 for (int i = 0; i < 2; i++) {
3920 // For internal arrays we only need a few things
3921 InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
3922 stubh1.GetCode();
3923 InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
3924 stubh2.GetCode();
3925 }
3926}
3927
3928void ArrayConstructorStub::GenerateDispatchToArrayStub(
3929 MacroAssembler* masm, AllocationSiteOverrideMode mode) {
3930 if (argument_count() == ANY) {
3931 Label not_zero_case, not_one_case;
3932 __ test(eax, eax);
3933 __ j(not_zero, &not_zero_case);
3934 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
3935
3936 __ bind(&not_zero_case);
3937 __ cmp(eax, 1);
3938 __ j(greater, &not_one_case);
3939 CreateArrayDispatchOneArgument(masm, mode);
3940
3941 __ bind(&not_one_case);
3942 ArrayNArgumentsConstructorStub stub(masm->isolate());
3943 __ TailCallStub(&stub);
3944 } else if (argument_count() == NONE) {
3945 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
3946 } else if (argument_count() == ONE) {
3947 CreateArrayDispatchOneArgument(masm, mode);
3948 } else if (argument_count() == MORE_THAN_ONE) {
3949 ArrayNArgumentsConstructorStub stub(masm->isolate());
3950 __ TailCallStub(&stub);
3951 } else {
3952 UNREACHABLE();
3953 }
3954}
3955
3956void ArrayConstructorStub::Generate(MacroAssembler* masm) {
3957 // ----------- S t a t e -------------
3958 // -- eax : argc (only if argument_count() is ANY or MORE_THAN_ONE)
3959 // -- ebx : AllocationSite or undefined
3960 // -- edi : constructor
3961 // -- edx : Original constructor
3962 // -- esp[0] : return address
3963 // -- esp[4] : last argument
3964 // -----------------------------------
3965 if (FLAG_debug_code) {
3966 // The array construct code is only set for the global and natives
3967 // builtin Array functions which always have maps.
3968
3969 // Initial map for the builtin Array function should be a map.
3970 __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
3971 // This test will catch both a NULL and a Smi.
3972 __ test(ecx, Immediate(kSmiTagMask));
3973 __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
3974 __ CmpObjectType(ecx, MAP_TYPE, ecx);
3975 __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
3976
3977 // We should either have undefined in ebx or a valid AllocationSite
3978 __ AssertUndefinedOrAllocationSite(ebx);
3979 }
3980
3981 Label subclassing;
3982
3983 // Enter the context of the Array function.
3984 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
3985
3986 __ cmp(edx, edi);
3987 __ j(not_equal, &subclassing);
3988
3989 Label no_info;
3990 // If the feedback vector is the undefined value call an array constructor
3991 // that doesn't use AllocationSites.
3992 __ cmp(ebx, isolate()->factory()->undefined_value());
3993 __ j(equal, &no_info);
3994
3995 // Only look at the lower 16 bits of the transition info.
3996 __ mov(edx, FieldOperand(ebx, AllocationSite::kTransitionInfoOffset));
3997 __ SmiUntag(edx);
3998 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
3999 __ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask));
4000 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
4001
4002 __ bind(&no_info);
4003 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
4004
4005 // Subclassing.
4006 __ bind(&subclassing);
4007 switch (argument_count()) {
4008 case ANY:
4009 case MORE_THAN_ONE:
4010 __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
4011 __ add(eax, Immediate(3));
4012 break;
4013 case NONE:
4014 __ mov(Operand(esp, 1 * kPointerSize), edi);
4015 __ mov(eax, Immediate(3));
4016 break;
4017 case ONE:
4018 __ mov(Operand(esp, 2 * kPointerSize), edi);
4019 __ mov(eax, Immediate(4));
4020 break;
4021 }
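  // In every case the original arguments stay on the stack and three extras
  // are appended for Runtime::kNewArray: the target function (edi, stored into
  // the argument area above), the new target (edx) and the AllocationSite or
  // undefined (ebx), which is what the argc adjustments account for.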
4022 __ PopReturnAddressTo(ecx);
4023 __ Push(edx);
4024 __ Push(ebx);
4025 __ PushReturnAddressFrom(ecx);
4026 __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
4027}
4028
4029void InternalArrayConstructorStub::GenerateCase(MacroAssembler* masm,
4030 ElementsKind kind) {
4031 Label not_zero_case, not_one_case;
4032 Label normal_sequence;
4033
4034 __ test(eax, eax);
4035 __ j(not_zero, &not_zero_case);
4036 InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
4037 __ TailCallStub(&stub0);
4038
4039 __ bind(&not_zero_case);
4040 __ cmp(eax, 1);
4041 __ j(greater, &not_one_case);
4042
4043 if (IsFastPackedElementsKind(kind)) {
4044 // We might need to create a holey array
4045 // look at the first argument
4046 __ mov(ecx, Operand(esp, kPointerSize));
4047 __ test(ecx, ecx);
4048 __ j(zero, &normal_sequence);
4049
4050 InternalArraySingleArgumentConstructorStub stub1_holey(
4051 isolate(), GetHoleyElementsKind(kind));
4052 __ TailCallStub(&stub1_holey);
4053 }
4054
4055 __ bind(&normal_sequence);
4056 InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
4057 __ TailCallStub(&stub1);
4058
4059 __ bind(&not_one_case);
4060 ArrayNArgumentsConstructorStub stubN(isolate());
4061 __ TailCallStub(&stubN);
4062}
4063
4064void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
4065 // ----------- S t a t e -------------
4066 // -- eax : argc
4067 // -- edi : constructor
4068 // -- esp[0] : return address
4069 // -- esp[4] : last argument
4070 // -----------------------------------
4071
4072 if (FLAG_debug_code) {
4073 // The array construct code is only set for the global and natives
4074 // builtin Array functions which always have maps.
4075
4076 // Initial map for the builtin Array function should be a map.
4077 __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
4078 // This test will catch both a NULL and a Smi.
4079 __ test(ecx, Immediate(kSmiTagMask));
4080 __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
4081 __ CmpObjectType(ecx, MAP_TYPE, ecx);
4082 __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
4083 }
4084
4085 // Figure out the right elements kind
4086 __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
4087
4088 // Load the map's "bit field 2" into ecx. We only need the first byte,
4089 // but the following masking takes care of that anyway.
4090 __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
4091 // Retrieve elements_kind from bit field 2.
4092 __ DecodeField<Map::ElementsKindBits>(ecx);
4093
4094 if (FLAG_debug_code) {
4095 Label done;
4096 __ cmp(ecx, Immediate(FAST_ELEMENTS));
4097 __ j(equal, &done);
4098 __ cmp(ecx, Immediate(FAST_HOLEY_ELEMENTS));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004099 __ Assert(equal, kInvalidElementsKindForInternalArrayOrInternalPackedArray);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004100 __ bind(&done);
4101 }
4102
4103 Label fast_elements_case;
4104 __ cmp(ecx, Immediate(FAST_ELEMENTS));
4105 __ j(equal, &fast_elements_case);
4106 GenerateCase(masm, FAST_HOLEY_ELEMENTS);
4107
4108 __ bind(&fast_elements_case);
4109 GenerateCase(masm, FAST_ELEMENTS);
4110}
4111
Ben Murdoch097c5b22016-05-18 11:27:45 +01004112void FastNewObjectStub::Generate(MacroAssembler* masm) {
4113 // ----------- S t a t e -------------
4114 // -- edi : target
4115 // -- edx : new target
4116 // -- esi : context
4117 // -- esp[0] : return address
4118 // -----------------------------------
4119 __ AssertFunction(edi);
4120 __ AssertReceiver(edx);
4121
4122 // Verify that the new target is a JSFunction.
4123 Label new_object;
4124 __ CmpObjectType(edx, JS_FUNCTION_TYPE, ebx);
4125 __ j(not_equal, &new_object);
4126
4127 // Load the initial map and verify that it's in fact a map.
4128 __ mov(ecx, FieldOperand(edx, JSFunction::kPrototypeOrInitialMapOffset));
4129 __ JumpIfSmi(ecx, &new_object);
4130 __ CmpObjectType(ecx, MAP_TYPE, ebx);
4131 __ j(not_equal, &new_object);
4132
4133 // Fall back to runtime if the target differs from the new target's
4134 // initial map constructor.
4135 __ cmp(edi, FieldOperand(ecx, Map::kConstructorOrBackPointerOffset));
4136 __ j(not_equal, &new_object);
4137
4138 // Allocate the JSObject on the heap.
4139 Label allocate, done_allocate;
4140 __ movzx_b(ebx, FieldOperand(ecx, Map::kInstanceSizeOffset));
4141 __ lea(ebx, Operand(ebx, times_pointer_size, 0));
4142 __ Allocate(ebx, eax, edi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
4143 __ bind(&done_allocate);
4144
4145 // Initialize the JSObject fields.
Ben Murdochc5610432016-08-08 18:44:38 +01004146 __ mov(FieldOperand(eax, JSObject::kMapOffset), ecx);
4147 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
Ben Murdoch097c5b22016-05-18 11:27:45 +01004148 masm->isolate()->factory()->empty_fixed_array());
Ben Murdochc5610432016-08-08 18:44:38 +01004149 __ mov(FieldOperand(eax, JSObject::kElementsOffset),
Ben Murdoch097c5b22016-05-18 11:27:45 +01004150 masm->isolate()->factory()->empty_fixed_array());
4151 STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
Ben Murdochc5610432016-08-08 18:44:38 +01004152 __ lea(ebx, FieldOperand(eax, JSObject::kHeaderSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004153
4154 // ----------- S t a t e -------------
Ben Murdochc5610432016-08-08 18:44:38 +01004155 // -- eax : result (tagged)
Ben Murdoch097c5b22016-05-18 11:27:45 +01004156 // -- ebx : result fields (untagged)
4157 // -- edi : result end (untagged)
4158 // -- ecx : initial map
4159 // -- esi : context
4160 // -- esp[0] : return address
4161 // -----------------------------------
4162
4163 // Perform in-object slack tracking if requested.
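  // A short summary of slack tracking: while the map's construction counter
  // is non-zero, the unused tail of each new object is filled with one-pointer
  // filler maps rather than undefined, so the instance size can be shrunk
  // later; once the counter reaches zero, Runtime::kFinalizeInstanceSize is
  // called below to fix the size for good.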
4164 Label slack_tracking;
4165 STATIC_ASSERT(Map::kNoSlackTracking == 0);
4166 __ test(FieldOperand(ecx, Map::kBitField3Offset),
4167 Immediate(Map::ConstructionCounter::kMask));
4168 __ j(not_zero, &slack_tracking, Label::kNear);
4169 {
4170 // Initialize all in-object fields with undefined.
4171 __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
4172 __ InitializeFieldsWithFiller(ebx, edi, edx);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004173 __ Ret();
4174 }
4175 __ bind(&slack_tracking);
4176 {
4177 // Decrease generous allocation count.
4178 STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
4179 __ sub(FieldOperand(ecx, Map::kBitField3Offset),
4180 Immediate(1 << Map::ConstructionCounter::kShift));
4181
4182 // Initialize the in-object fields with undefined.
4183 __ movzx_b(edx, FieldOperand(ecx, Map::kUnusedPropertyFieldsOffset));
4184 __ neg(edx);
4185 __ lea(edx, Operand(edi, edx, times_pointer_size, 0));
4186 __ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
4187 __ InitializeFieldsWithFiller(ebx, edx, edi);
4188
4189    // Initialize the remaining (reserved) fields with the one-pointer filler map.
4190 __ movzx_b(edx, FieldOperand(ecx, Map::kUnusedPropertyFieldsOffset));
4191 __ lea(edx, Operand(ebx, edx, times_pointer_size, 0));
4192 __ LoadRoot(edi, Heap::kOnePointerFillerMapRootIndex);
4193 __ InitializeFieldsWithFiller(ebx, edx, edi);
4194
Ben Murdoch097c5b22016-05-18 11:27:45 +01004195 // Check if we can finalize the instance size.
4196 Label finalize;
4197 STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
4198 __ test(FieldOperand(ecx, Map::kBitField3Offset),
4199 Immediate(Map::ConstructionCounter::kMask));
4200 __ j(zero, &finalize, Label::kNear);
4201 __ Ret();
4202
4203 // Finalize the instance size.
4204 __ bind(&finalize);
4205 {
4206 FrameScope scope(masm, StackFrame::INTERNAL);
4207 __ Push(eax);
4208 __ Push(ecx);
4209 __ CallRuntime(Runtime::kFinalizeInstanceSize);
4210 __ Pop(eax);
4211 }
4212 __ Ret();
4213 }
4214
4215 // Fall back to %AllocateInNewSpace.
4216 __ bind(&allocate);
4217 {
4218 FrameScope scope(masm, StackFrame::INTERNAL);
4219 __ SmiTag(ebx);
4220 __ Push(ecx);
4221 __ Push(ebx);
4222 __ CallRuntime(Runtime::kAllocateInNewSpace);
4223 __ Pop(ecx);
4224 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01004225 __ movzx_b(ebx, FieldOperand(ecx, Map::kInstanceSizeOffset));
4226 __ lea(edi, Operand(eax, ebx, times_pointer_size, 0));
Ben Murdochc5610432016-08-08 18:44:38 +01004227 STATIC_ASSERT(kHeapObjectTag == 1);
4228 __ dec(edi);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004229 __ jmp(&done_allocate);
4230
4231 // Fall back to %NewObject.
4232 __ bind(&new_object);
4233 __ PopReturnAddressTo(ecx);
4234 __ Push(edi);
4235 __ Push(edx);
4236 __ PushReturnAddressFrom(ecx);
4237 __ TailCallRuntime(Runtime::kNewObject);
4238}
4239
4240void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
4241 // ----------- S t a t e -------------
4242 // -- edi : function
4243 // -- esi : context
4244 // -- ebp : frame pointer
4245 // -- esp[0] : return address
4246 // -----------------------------------
4247 __ AssertFunction(edi);
4248
Ben Murdochc5610432016-08-08 18:44:38 +01004249 // Make edx point to the JavaScript frame.
4250 __ mov(edx, ebp);
4251 if (skip_stub_frame()) {
4252 // For Ignition we need to skip the handler/stub frame to reach the
4253 // JavaScript frame for the function.
Ben Murdoch097c5b22016-05-18 11:27:45 +01004254 __ mov(edx, Operand(edx, StandardFrameConstants::kCallerFPOffset));
Ben Murdochc5610432016-08-08 18:44:38 +01004255 }
4256 if (FLAG_debug_code) {
4257 Label ok;
Ben Murdochda12d292016-06-02 14:46:10 +01004258 __ cmp(edi, Operand(edx, StandardFrameConstants::kFunctionOffset));
Ben Murdochc5610432016-08-08 18:44:38 +01004259 __ j(equal, &ok);
4260 __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
4261 __ bind(&ok);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004262 }
4263
4264 // Check if we have rest parameters (only possible if we have an
4265 // arguments adaptor frame below the function frame).
4266 Label no_rest_parameters;
4267 __ mov(ebx, Operand(edx, StandardFrameConstants::kCallerFPOffset));
Ben Murdochda12d292016-06-02 14:46:10 +01004268 __ cmp(Operand(ebx, CommonFrameConstants::kContextOrFrameTypeOffset),
Ben Murdoch097c5b22016-05-18 11:27:45 +01004269 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4270 __ j(not_equal, &no_rest_parameters, Label::kNear);
4271
4272 // Check if the arguments adaptor frame contains more arguments than
4273 // specified by the function's internal formal parameter count.
4274 Label rest_parameters;
4275 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
4276 __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
4277 __ sub(eax,
4278 FieldOperand(ecx, SharedFunctionInfo::kFormalParameterCountOffset));
4279 __ j(greater, &rest_parameters);
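  // eax (a Smi) now holds argc minus the formal parameter count; the branch
  // above is taken when it is positive, i.e. when rest parameters are present,
  // otherwise we fall through and return an empty array.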
4280
4281 // Return an empty rest parameter array.
4282 __ bind(&no_rest_parameters);
4283 {
4284 // ----------- S t a t e -------------
4285 // -- esi : context
4286 // -- esp[0] : return address
4287 // -----------------------------------
4288
4289 // Allocate an empty rest parameter array.
4290 Label allocate, done_allocate;
Ben Murdochc5610432016-08-08 18:44:38 +01004291 __ Allocate(JSArray::kSize, eax, edx, ecx, &allocate, NO_ALLOCATION_FLAGS);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004292 __ bind(&done_allocate);
4293
4294    // Setup the rest parameter array in eax.
4295 __ LoadGlobalFunction(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, ecx);
4296 __ mov(FieldOperand(eax, JSArray::kMapOffset), ecx);
4297 __ mov(ecx, isolate()->factory()->empty_fixed_array());
4298 __ mov(FieldOperand(eax, JSArray::kPropertiesOffset), ecx);
4299 __ mov(FieldOperand(eax, JSArray::kElementsOffset), ecx);
4300 __ mov(FieldOperand(eax, JSArray::kLengthOffset),
4301 Immediate(Smi::FromInt(0)));
4302 STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
4303 __ Ret();
4304
4305 // Fall back to %AllocateInNewSpace.
4306 __ bind(&allocate);
4307 {
4308 FrameScope scope(masm, StackFrame::INTERNAL);
4309 __ Push(Smi::FromInt(JSArray::kSize));
4310 __ CallRuntime(Runtime::kAllocateInNewSpace);
4311 }
4312 __ jmp(&done_allocate);
4313 }
4314
4315 __ bind(&rest_parameters);
4316 {
4317    // Compute the pointer to the first rest parameter (skipping the receiver).
4318 __ lea(ebx,
4319 Operand(ebx, eax, times_half_pointer_size,
4320 StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
4321
4322 // ----------- S t a t e -------------
4323 // -- esi : context
4324 // -- eax : number of rest parameters (tagged)
4325 // -- ebx : pointer to first rest parameters
4326 // -- esp[0] : return address
4327 // -----------------------------------
4328
4329 // Allocate space for the rest parameter array plus the backing store.
4330 Label allocate, done_allocate;
4331 __ lea(ecx, Operand(eax, times_half_pointer_size,
4332 JSArray::kSize + FixedArray::kHeaderSize));
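    // eax is a Smi, so scaling it by times_half_pointer_size above turns the
    // tagged element count into a byte count (Smi value * kPointerSize).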
Ben Murdochc5610432016-08-08 18:44:38 +01004333 __ Allocate(ecx, edx, edi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004334 __ bind(&done_allocate);
4335
4336 // Setup the elements array in edx.
4337 __ mov(FieldOperand(edx, FixedArray::kMapOffset),
4338 isolate()->factory()->fixed_array_map());
4339 __ mov(FieldOperand(edx, FixedArray::kLengthOffset), eax);
4340 {
4341 Label loop, done_loop;
4342 __ Move(ecx, Smi::FromInt(0));
4343 __ bind(&loop);
4344 __ cmp(ecx, eax);
4345 __ j(equal, &done_loop, Label::kNear);
4346 __ mov(edi, Operand(ebx, 0 * kPointerSize));
4347 __ mov(FieldOperand(edx, ecx, times_half_pointer_size,
4348 FixedArray::kHeaderSize),
4349 edi);
4350 __ sub(ebx, Immediate(1 * kPointerSize));
4351 __ add(ecx, Immediate(Smi::FromInt(1)));
4352 __ jmp(&loop);
4353 __ bind(&done_loop);
4354 }
4355
4356 // Setup the rest parameter array in edi.
4357 __ lea(edi,
4358 Operand(edx, eax, times_half_pointer_size, FixedArray::kHeaderSize));
4359 __ LoadGlobalFunction(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, ecx);
4360 __ mov(FieldOperand(edi, JSArray::kMapOffset), ecx);
4361 __ mov(FieldOperand(edi, JSArray::kPropertiesOffset),
4362 isolate()->factory()->empty_fixed_array());
4363 __ mov(FieldOperand(edi, JSArray::kElementsOffset), edx);
4364 __ mov(FieldOperand(edi, JSArray::kLengthOffset), eax);
4365 STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
4366 __ mov(eax, edi);
4367 __ Ret();
4368
Ben Murdoch61f157c2016-09-16 13:49:30 +01004369 // Fall back to %AllocateInNewSpace (if not too big).
4370 Label too_big_for_new_space;
Ben Murdoch097c5b22016-05-18 11:27:45 +01004371 __ bind(&allocate);
Ben Murdoch61f157c2016-09-16 13:49:30 +01004372 __ cmp(ecx, Immediate(Page::kMaxRegularHeapObjectSize));
4373 __ j(greater, &too_big_for_new_space);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004374 {
4375 FrameScope scope(masm, StackFrame::INTERNAL);
4376 __ SmiTag(ecx);
4377 __ Push(eax);
4378 __ Push(ebx);
4379 __ Push(ecx);
4380 __ CallRuntime(Runtime::kAllocateInNewSpace);
4381 __ mov(edx, eax);
4382 __ Pop(ebx);
4383 __ Pop(eax);
4384 }
4385 __ jmp(&done_allocate);
Ben Murdoch61f157c2016-09-16 13:49:30 +01004386
4387 // Fall back to %NewRestParameter.
4388 __ bind(&too_big_for_new_space);
4389 __ PopReturnAddressTo(ecx);
4390 // We reload the function from the caller frame due to register pressure
4391 // within this stub. This is the slow path, hence reloading is preferable.
4392 if (skip_stub_frame()) {
4393 // For Ignition we need to skip the handler/stub frame to reach the
4394 // JavaScript frame for the function.
4395 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
4396 __ Push(Operand(edx, StandardFrameConstants::kFunctionOffset));
4397 } else {
4398 __ Push(Operand(ebp, StandardFrameConstants::kFunctionOffset));
4399 }
4400 __ PushReturnAddressFrom(ecx);
4401 __ TailCallRuntime(Runtime::kNewRestParameter);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004402 }
4403}
4404
4405void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
4406 // ----------- S t a t e -------------
4407 // -- edi : function
4408 // -- esi : context
4409 // -- ebp : frame pointer
4410 // -- esp[0] : return address
4411 // -----------------------------------
4412 __ AssertFunction(edi);
4413
Ben Murdochc5610432016-08-08 18:44:38 +01004414 // Make ecx point to the JavaScript frame.
4415 __ mov(ecx, ebp);
4416 if (skip_stub_frame()) {
4417 // For Ignition we need to skip the handler/stub frame to reach the
4418 // JavaScript frame for the function.
4419 __ mov(ecx, Operand(ecx, StandardFrameConstants::kCallerFPOffset));
4420 }
4421 if (FLAG_debug_code) {
4422 Label ok;
4423 __ cmp(edi, Operand(ecx, StandardFrameConstants::kFunctionOffset));
4424 __ j(equal, &ok);
4425 __ Abort(kInvalidFrameForFastNewSloppyArgumentsStub);
4426 __ bind(&ok);
4427 }
4428
Ben Murdoch097c5b22016-05-18 11:27:45 +01004429 // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
Ben Murdochc5610432016-08-08 18:44:38 +01004430 __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
4431 __ mov(ebx,
4432 FieldOperand(ebx, SharedFunctionInfo::kFormalParameterCountOffset));
4433 __ lea(edx, Operand(ecx, ebx, times_half_pointer_size,
Ben Murdoch097c5b22016-05-18 11:27:45 +01004434 StandardFrameConstants::kCallerSPOffset));
4435
Ben Murdochc5610432016-08-08 18:44:38 +01004436 // ebx : number of parameters (tagged)
Ben Murdoch097c5b22016-05-18 11:27:45 +01004437 // edx : parameters pointer
4438 // edi : function
Ben Murdochc5610432016-08-08 18:44:38 +01004439 // ecx : JavaScript frame pointer.
Ben Murdoch097c5b22016-05-18 11:27:45 +01004440 // esp[0] : return address
4441
4442 // Check if the calling frame is an arguments adaptor frame.
4443 Label adaptor_frame, try_allocate, runtime;
Ben Murdochc5610432016-08-08 18:44:38 +01004444 __ mov(eax, Operand(ecx, StandardFrameConstants::kCallerFPOffset));
4445 __ mov(eax, Operand(eax, CommonFrameConstants::kContextOrFrameTypeOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004446 __ cmp(eax, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4447 __ j(equal, &adaptor_frame, Label::kNear);
4448
4449 // No adaptor, parameter count = argument count.
Ben Murdochc5610432016-08-08 18:44:38 +01004450 __ mov(ecx, ebx);
4451 __ push(ebx);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004452 __ jmp(&try_allocate, Label::kNear);
4453
4454 // We have an adaptor frame. Patch the parameters pointer.
4455 __ bind(&adaptor_frame);
Ben Murdochc5610432016-08-08 18:44:38 +01004456 __ push(ebx);
4457 __ mov(edx, Operand(ecx, StandardFrameConstants::kCallerFPOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004458 __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
4459 __ lea(edx,
4460 Operand(edx, ecx, times_2, StandardFrameConstants::kCallerSPOffset));
4461
4462 // ebx = parameter count (tagged)
4463 // ecx = argument count (smi-tagged)
4464 // Compute the mapped parameter count = min(ebx, ecx) in ebx.
4465 __ cmp(ebx, ecx);
4466 __ j(less_equal, &try_allocate, Label::kNear);
4467 __ mov(ebx, ecx);
4468
4469 // Save mapped parameter count and function.
4470 __ bind(&try_allocate);
4471 __ push(edi);
4472 __ push(ebx);
4473
4474 // Compute the sizes of backing store, parameter map, and arguments object.
4475 // 1. Parameter map, has 2 extra words containing context and backing store.
4476 const int kParameterMapHeaderSize =
4477 FixedArray::kHeaderSize + 2 * kPointerSize;
4478 Label no_parameter_map;
4479 __ test(ebx, ebx);
4480 __ j(zero, &no_parameter_map, Label::kNear);
4481 __ lea(ebx, Operand(ebx, times_2, kParameterMapHeaderSize));
4482 __ bind(&no_parameter_map);
4483
4484 // 2. Backing store.
4485 __ lea(ebx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize));
4486
4487 // 3. Arguments object.
4488 __ add(ebx, Immediate(JSSloppyArgumentsObject::kSize));
4489
4490 // Do the allocation of all three objects in one go.
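  // Within this single chunk the JSSloppyArgumentsObject comes first, followed
  // by the (optional) parameter map and then the backing store; ebx holds the
  // total size in bytes computed above.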
Ben Murdochc5610432016-08-08 18:44:38 +01004491 __ Allocate(ebx, eax, edi, no_reg, &runtime, NO_ALLOCATION_FLAGS);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004492
4493 // eax = address of new object(s) (tagged)
4494 // ecx = argument count (smi-tagged)
4495 // esp[0] = mapped parameter count (tagged)
4496 // esp[4] = function
4497 // esp[8] = parameter count (tagged)
4498 // Get the arguments map from the current native context into edi.
4499 Label has_mapped_parameters, instantiate;
4500 __ mov(edi, NativeContextOperand());
4501 __ mov(ebx, Operand(esp, 0 * kPointerSize));
4502 __ test(ebx, ebx);
4503 __ j(not_zero, &has_mapped_parameters, Label::kNear);
4504 __ mov(
4505 edi,
4506 Operand(edi, Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX)));
4507 __ jmp(&instantiate, Label::kNear);
4508
4509 __ bind(&has_mapped_parameters);
4510 __ mov(edi, Operand(edi, Context::SlotOffset(
4511 Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX)));
4512 __ bind(&instantiate);
4513
4514 // eax = address of new object (tagged)
4515 // ebx = mapped parameter count (tagged)
4516 // ecx = argument count (smi-tagged)
4517 // edi = address of arguments map (tagged)
4518 // esp[0] = mapped parameter count (tagged)
4519 // esp[4] = function
4520 // esp[8] = parameter count (tagged)
4521 // Copy the JS object part.
4522 __ mov(FieldOperand(eax, JSObject::kMapOffset), edi);
4523 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
4524 masm->isolate()->factory()->empty_fixed_array());
4525 __ mov(FieldOperand(eax, JSObject::kElementsOffset),
4526 masm->isolate()->factory()->empty_fixed_array());
4527
4528 // Set up the callee in-object property.
4529 STATIC_ASSERT(JSSloppyArgumentsObject::kCalleeIndex == 1);
4530 __ mov(edi, Operand(esp, 1 * kPointerSize));
4531 __ AssertNotSmi(edi);
4532 __ mov(FieldOperand(eax, JSSloppyArgumentsObject::kCalleeOffset), edi);
4533
4534 // Use the length (smi tagged) and set that as an in-object property too.
4535 __ AssertSmi(ecx);
4536 __ mov(FieldOperand(eax, JSSloppyArgumentsObject::kLengthOffset), ecx);
4537
4538 // Set up the elements pointer in the allocated arguments object.
4539 // If we allocated a parameter map, edi will point there, otherwise to the
4540 // backing store.
4541 __ lea(edi, Operand(eax, JSSloppyArgumentsObject::kSize));
4542 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
4543
4544 // eax = address of new object (tagged)
4545 // ebx = mapped parameter count (tagged)
4546 // ecx = argument count (tagged)
4547 // edx = address of receiver argument
4548 // edi = address of parameter map or backing store (tagged)
4549 // esp[0] = mapped parameter count (tagged)
4550 // esp[4] = function
4551 // esp[8] = parameter count (tagged)
4552 // Free two registers.
4553 __ push(edx);
4554 __ push(eax);
4555
4556 // Initialize parameter map. If there are no mapped arguments, we're done.
4557 Label skip_parameter_map;
4558 __ test(ebx, ebx);
4559 __ j(zero, &skip_parameter_map);
4560
4561 __ mov(FieldOperand(edi, FixedArray::kMapOffset),
4562 Immediate(isolate()->factory()->sloppy_arguments_elements_map()));
4563 __ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2))));
4564 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax);
4565 __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi);
4566 __ lea(eax, Operand(edi, ebx, times_2, kParameterMapHeaderSize));
4567 __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 1 * kPointerSize), eax);
4568
4569 // Copy the parameter slots and the holes in the arguments.
4570 // We need to fill in mapped_parameter_count slots. They index the context,
4571 // where parameters are stored in reverse order, at
4572 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
4573  // The mapped parameters thus need to get indices
4574 // MIN_CONTEXT_SLOTS+parameter_count-1 ..
4575 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
4576 // We loop from right to left.
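  // For example (purely for illustration), with parameter_count == 3 and a
  // mapped count of 2, slot 1 of the parameter map receives
  // MIN_CONTEXT_SLOTS + 1 and slot 0 receives MIN_CONTEXT_SLOTS + 2, so
  // argument i aliases the context slot holding parameter i.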
4577 Label parameters_loop, parameters_test;
4578 __ push(ecx);
4579 __ mov(eax, Operand(esp, 3 * kPointerSize));
4580 __ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
4581 __ add(ebx, Operand(esp, 5 * kPointerSize));
4582 __ sub(ebx, eax);
4583 __ mov(ecx, isolate()->factory()->the_hole_value());
4584 __ mov(edx, edi);
4585 __ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize));
4586 // eax = loop variable (tagged)
4587 // ebx = mapping index (tagged)
4588 // ecx = the hole value
4589 // edx = address of parameter map (tagged)
4590 // edi = address of backing store (tagged)
4591 // esp[0] = argument count (tagged)
4592 // esp[4] = address of new object (tagged)
4593 // esp[8] = address of receiver argument
4594 // esp[12] = mapped parameter count (tagged)
4595 // esp[16] = function
4596 // esp[20] = parameter count (tagged)
4597 __ jmp(&parameters_test, Label::kNear);
4598
4599 __ bind(&parameters_loop);
4600 __ sub(eax, Immediate(Smi::FromInt(1)));
4601 __ mov(FieldOperand(edx, eax, times_2, kParameterMapHeaderSize), ebx);
4602 __ mov(FieldOperand(edi, eax, times_2, FixedArray::kHeaderSize), ecx);
4603 __ add(ebx, Immediate(Smi::FromInt(1)));
4604 __ bind(&parameters_test);
4605 __ test(eax, eax);
4606 __ j(not_zero, &parameters_loop, Label::kNear);
4607 __ pop(ecx);
4608
4609 __ bind(&skip_parameter_map);
4610
4611 // ecx = argument count (tagged)
4612 // edi = address of backing store (tagged)
4613 // esp[0] = address of new object (tagged)
4614 // esp[4] = address of receiver argument
4615 // esp[8] = mapped parameter count (tagged)
4616 // esp[12] = function
4617 // esp[16] = parameter count (tagged)
4618 // Copy arguments header and remaining slots (if there are any).
4619 __ mov(FieldOperand(edi, FixedArray::kMapOffset),
4620 Immediate(isolate()->factory()->fixed_array_map()));
4621 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
4622
4623 Label arguments_loop, arguments_test;
4624 __ mov(ebx, Operand(esp, 2 * kPointerSize));
4625 __ mov(edx, Operand(esp, 1 * kPointerSize));
4626 __ sub(edx, ebx); // Is there a smarter way to do negative scaling?
4627 __ sub(edx, ebx);
4628 __ jmp(&arguments_test, Label::kNear);
4629
4630 __ bind(&arguments_loop);
4631 __ sub(edx, Immediate(kPointerSize));
4632 __ mov(eax, Operand(edx, 0));
4633 __ mov(FieldOperand(edi, ebx, times_2, FixedArray::kHeaderSize), eax);
4634 __ add(ebx, Immediate(Smi::FromInt(1)));
4635
4636 __ bind(&arguments_test);
4637 __ cmp(ebx, ecx);
4638 __ j(less, &arguments_loop, Label::kNear);
4639
4640 // Restore.
4641 __ pop(eax); // Address of arguments object.
4642 __ Drop(4);
4643
4644 // Return.
4645 __ ret(0);
4646
4647 // Do the runtime call to allocate the arguments object.
4648 __ bind(&runtime);
4649 __ pop(eax); // Remove saved mapped parameter count.
4650 __ pop(edi); // Pop saved function.
4651 __ pop(eax); // Remove saved parameter count.
4652 __ pop(eax); // Pop return address.
4653 __ push(edi); // Push function.
4654 __ push(edx); // Push parameters pointer.
4655 __ push(ecx); // Push parameter count.
4656 __ push(eax); // Push return address.
4657 __ TailCallRuntime(Runtime::kNewSloppyArguments);
4658}
4659
4660void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
4661 // ----------- S t a t e -------------
4662 // -- edi : function
4663 // -- esi : context
4664 // -- ebp : frame pointer
4665 // -- esp[0] : return address
4666 // -----------------------------------
4667 __ AssertFunction(edi);
4668
Ben Murdochc5610432016-08-08 18:44:38 +01004669 // Make edx point to the JavaScript frame.
4670 __ mov(edx, ebp);
4671 if (skip_stub_frame()) {
4672 // For Ignition we need to skip the handler/stub frame to reach the
4673 // JavaScript frame for the function.
Ben Murdoch097c5b22016-05-18 11:27:45 +01004674 __ mov(edx, Operand(edx, StandardFrameConstants::kCallerFPOffset));
Ben Murdochc5610432016-08-08 18:44:38 +01004675 }
4676 if (FLAG_debug_code) {
4677 Label ok;
Ben Murdochda12d292016-06-02 14:46:10 +01004678 __ cmp(edi, Operand(edx, StandardFrameConstants::kFunctionOffset));
Ben Murdochc5610432016-08-08 18:44:38 +01004679 __ j(equal, &ok);
4680 __ Abort(kInvalidFrameForFastNewStrictArgumentsStub);
4681 __ bind(&ok);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004682 }
4683
4684 // Check if we have an arguments adaptor frame below the function frame.
4685 Label arguments_adaptor, arguments_done;
4686 __ mov(ebx, Operand(edx, StandardFrameConstants::kCallerFPOffset));
Ben Murdochda12d292016-06-02 14:46:10 +01004687 __ cmp(Operand(ebx, CommonFrameConstants::kContextOrFrameTypeOffset),
Ben Murdoch097c5b22016-05-18 11:27:45 +01004688 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4689 __ j(equal, &arguments_adaptor, Label::kNear);
4690 {
4691 __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
4692 __ mov(eax,
4693 FieldOperand(eax, SharedFunctionInfo::kFormalParameterCountOffset));
4694 __ lea(ebx,
4695 Operand(edx, eax, times_half_pointer_size,
4696 StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
4697 }
4698 __ jmp(&arguments_done, Label::kNear);
4699 __ bind(&arguments_adaptor);
4700 {
4701 __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
4702 __ lea(ebx,
4703 Operand(ebx, eax, times_half_pointer_size,
4704 StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
4705 }
4706 __ bind(&arguments_done);
4707
4708 // ----------- S t a t e -------------
4709 // -- eax : number of arguments (tagged)
4710 // -- ebx : pointer to the first argument
4711 // -- esi : context
4712 // -- esp[0] : return address
4713 // -----------------------------------
4714
4715 // Allocate space for the strict arguments object plus the backing store.
4716 Label allocate, done_allocate;
4717 __ lea(ecx,
4718 Operand(eax, times_half_pointer_size,
4719 JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize));
Ben Murdochc5610432016-08-08 18:44:38 +01004720 __ Allocate(ecx, edx, edi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004721 __ bind(&done_allocate);
4722
4723 // Setup the elements array in edx.
4724 __ mov(FieldOperand(edx, FixedArray::kMapOffset),
4725 isolate()->factory()->fixed_array_map());
4726 __ mov(FieldOperand(edx, FixedArray::kLengthOffset), eax);
4727 {
4728 Label loop, done_loop;
4729 __ Move(ecx, Smi::FromInt(0));
4730 __ bind(&loop);
4731 __ cmp(ecx, eax);
4732 __ j(equal, &done_loop, Label::kNear);
4733 __ mov(edi, Operand(ebx, 0 * kPointerSize));
4734 __ mov(FieldOperand(edx, ecx, times_half_pointer_size,
4735 FixedArray::kHeaderSize),
4736 edi);
4737 __ sub(ebx, Immediate(1 * kPointerSize));
4738 __ add(ecx, Immediate(Smi::FromInt(1)));
4739 __ jmp(&loop);
4740 __ bind(&done_loop);
4741 }
4742
4743 // Setup the rest parameter array in edi.
4744 __ lea(edi,
4745 Operand(edx, eax, times_half_pointer_size, FixedArray::kHeaderSize));
4746 __ LoadGlobalFunction(Context::STRICT_ARGUMENTS_MAP_INDEX, ecx);
4747 __ mov(FieldOperand(edi, JSStrictArgumentsObject::kMapOffset), ecx);
4748 __ mov(FieldOperand(edi, JSStrictArgumentsObject::kPropertiesOffset),
4749 isolate()->factory()->empty_fixed_array());
4750 __ mov(FieldOperand(edi, JSStrictArgumentsObject::kElementsOffset), edx);
4751 __ mov(FieldOperand(edi, JSStrictArgumentsObject::kLengthOffset), eax);
4752 STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
4753 __ mov(eax, edi);
4754 __ Ret();
4755
Ben Murdoch61f157c2016-09-16 13:49:30 +01004756 // Fall back to %AllocateInNewSpace (if not too big).
4757 Label too_big_for_new_space;
Ben Murdoch097c5b22016-05-18 11:27:45 +01004758 __ bind(&allocate);
Ben Murdoch61f157c2016-09-16 13:49:30 +01004759 __ cmp(ecx, Immediate(Page::kMaxRegularHeapObjectSize));
4760 __ j(greater, &too_big_for_new_space);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004761 {
4762 FrameScope scope(masm, StackFrame::INTERNAL);
4763 __ SmiTag(ecx);
4764 __ Push(eax);
4765 __ Push(ebx);
4766 __ Push(ecx);
4767 __ CallRuntime(Runtime::kAllocateInNewSpace);
4768 __ mov(edx, eax);
4769 __ Pop(ebx);
4770 __ Pop(eax);
4771 }
4772 __ jmp(&done_allocate);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004773
Ben Murdoch61f157c2016-09-16 13:49:30 +01004774 // Fall back to %NewStrictArguments.
4775 __ bind(&too_big_for_new_space);
4776 __ PopReturnAddressTo(ecx);
4777 // We reload the function from the caller frame due to register pressure
4778 // within this stub. This is the slow path, hence reloading is preferable.
4779 if (skip_stub_frame()) {
4780 // For Ignition we need to skip the handler/stub frame to reach the
4781 // JavaScript frame for the function.
4782 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
4783 __ Push(Operand(edx, StandardFrameConstants::kFunctionOffset));
4784 } else {
4785 __ Push(Operand(ebp, StandardFrameConstants::kFunctionOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004786 }
Ben Murdoch61f157c2016-09-16 13:49:30 +01004787 __ PushReturnAddressFrom(ecx);
4788 __ TailCallRuntime(Runtime::kNewStrictArguments);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004789}
4790
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004791void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
4792 Register context_reg = esi;
4793 Register slot_reg = ebx;
4794 Register value_reg = eax;
4795 Register cell_reg = edi;
4796 Register cell_details_reg = edx;
4797 Register cell_value_reg = ecx;
4798 Label fast_heapobject_case, fast_smi_case, slow_case;
4799
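  // Fast paths handled below: mutable data cells are updated in place (with a
  // write barrier for heap objects), and constant-type cells are updated only
  // when the new value is a Smi like the old value or shares the old value's
  // map; everything else goes to the runtime.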
4800 if (FLAG_debug_code) {
4801 __ CompareRoot(value_reg, Heap::kTheHoleValueRootIndex);
4802 __ Check(not_equal, kUnexpectedValue);
4803 }
4804
4805 // Go up context chain to the script context.
4806 for (int i = 0; i < depth(); ++i) {
4807 __ mov(cell_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
4808 context_reg = cell_reg;
4809 }
4810
4811 // Load the PropertyCell at the specified slot.
4812 __ mov(cell_reg, ContextOperand(context_reg, slot_reg));
4813
4814 // Load PropertyDetails for the cell (actually only the cell_type and kind).
4815 __ mov(cell_details_reg,
4816 FieldOperand(cell_reg, PropertyCell::kDetailsOffset));
4817 __ SmiUntag(cell_details_reg);
4818 __ and_(cell_details_reg,
4819 Immediate(PropertyDetails::PropertyCellTypeField::kMask |
4820 PropertyDetails::KindField::kMask |
4821 PropertyDetails::kAttributesReadOnlyMask));
4822
4823 // Check if PropertyCell holds mutable data.
4824 Label not_mutable_data;
4825 __ cmp(cell_details_reg,
4826 Immediate(PropertyDetails::PropertyCellTypeField::encode(
4827 PropertyCellType::kMutable) |
4828 PropertyDetails::KindField::encode(kData)));
4829 __ j(not_equal, &not_mutable_data);
4830 __ JumpIfSmi(value_reg, &fast_smi_case);
4831 __ bind(&fast_heapobject_case);
4832 __ mov(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
4833 __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg,
4834 cell_details_reg, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
4835 OMIT_SMI_CHECK);
4836 // RecordWriteField clobbers the value register, so we need to reload.
4837 __ mov(value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
4838 __ Ret();
4839 __ bind(&not_mutable_data);
4840
4841 // Check if PropertyCell value matches the new value (relevant for Constant,
4842 // ConstantType and Undefined cells).
4843 Label not_same_value;
4844 __ mov(cell_value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
4845 __ cmp(cell_value_reg, value_reg);
4846 __ j(not_equal, &not_same_value,
4847 FLAG_debug_code ? Label::kFar : Label::kNear);
4848 // Make sure the PropertyCell is not marked READ_ONLY.
4849 __ test(cell_details_reg,
4850 Immediate(PropertyDetails::kAttributesReadOnlyMask));
4851 __ j(not_zero, &slow_case);
4852 if (FLAG_debug_code) {
4853 Label done;
4854 // This can only be true for Constant, ConstantType and Undefined cells,
4855 // because we never store the_hole via this stub.
4856 __ cmp(cell_details_reg,
4857 Immediate(PropertyDetails::PropertyCellTypeField::encode(
4858 PropertyCellType::kConstant) |
4859 PropertyDetails::KindField::encode(kData)));
4860 __ j(equal, &done);
4861 __ cmp(cell_details_reg,
4862 Immediate(PropertyDetails::PropertyCellTypeField::encode(
4863 PropertyCellType::kConstantType) |
4864 PropertyDetails::KindField::encode(kData)));
4865 __ j(equal, &done);
4866 __ cmp(cell_details_reg,
4867 Immediate(PropertyDetails::PropertyCellTypeField::encode(
4868 PropertyCellType::kUndefined) |
4869 PropertyDetails::KindField::encode(kData)));
4870 __ Check(equal, kUnexpectedValue);
4871 __ bind(&done);
4872 }
4873 __ Ret();
4874 __ bind(&not_same_value);
4875
4876 // Check if PropertyCell contains data with constant type (and is not
4877 // READ_ONLY).
4878 __ cmp(cell_details_reg,
4879 Immediate(PropertyDetails::PropertyCellTypeField::encode(
4880 PropertyCellType::kConstantType) |
4881 PropertyDetails::KindField::encode(kData)));
4882 __ j(not_equal, &slow_case, Label::kNear);
4883
4884 // Now either both old and new values must be SMIs or both must be heap
4885 // objects with same map.
4886 Label value_is_heap_object;
4887 __ JumpIfNotSmi(value_reg, &value_is_heap_object, Label::kNear);
4888 __ JumpIfNotSmi(cell_value_reg, &slow_case, Label::kNear);
4889 // Old and new values are SMIs, no need for a write barrier here.
4890 __ bind(&fast_smi_case);
4891 __ mov(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
4892 __ Ret();
4893 __ bind(&value_is_heap_object);
4894 __ JumpIfSmi(cell_value_reg, &slow_case, Label::kNear);
4895 Register cell_value_map_reg = cell_value_reg;
4896 __ mov(cell_value_map_reg,
4897 FieldOperand(cell_value_reg, HeapObject::kMapOffset));
4898 __ cmp(cell_value_map_reg, FieldOperand(value_reg, HeapObject::kMapOffset));
4899 __ j(equal, &fast_heapobject_case);
4900
4901 // Fallback to the runtime.
4902 __ bind(&slow_case);
4903 __ SmiTag(slot_reg);
4904 __ Pop(cell_reg); // Pop return address.
4905 __ Push(slot_reg);
4906 __ Push(value_reg);
4907 __ Push(cell_reg); // Push return address.
4908 __ TailCallRuntime(is_strict(language_mode())
4909 ? Runtime::kStoreGlobalViaContext_Strict
4910 : Runtime::kStoreGlobalViaContext_Sloppy);
4911}
4912
4913
4914// Generates an Operand for saving parameters after PrepareCallApiFunction.
4915static Operand ApiParameterOperand(int index) {
4916 return Operand(esp, index * kPointerSize);
4917}
4918
4919
4920// Prepares the stack to put arguments (aligns and so on). Reserves
4921// space for the return value if needed (assumes the return value is a handle).
4922// Arguments must be stored in ApiParameterOperand(0), ApiParameterOperand(1)
4923// etc. Saves context (esi). If space was reserved for the return value, then
4924// the pointer to the reserved slot is stored into esi.
4925static void PrepareCallApiFunction(MacroAssembler* masm, int argc) {
4926 __ EnterApiExitFrame(argc);
4927 if (__ emit_debug_code()) {
4928 __ mov(esi, Immediate(bit_cast<int32_t>(kZapValue)));
4929 }
4930}
4931
4932
4933// Calls an API function. Allocates a HandleScope, extracts the returned value
4934// from the handle, and propagates exceptions. Clobbers ebx, edi and the
4935// caller-save registers. Restores context. On return removes
4936// stack_space * kPointerSize of (GCed) stack space.
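// Roughly, the sequence below is: save the current HandleScope state
// (next/limit/level), invoke the callback (through the profiling thunk when
// the CPU profiler is active), restore the HandleScope, leave the API exit
// frame, re-throw any scheduled exception and, in debug builds, verify that
// the returned value is a valid JavaScript value.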
4937static void CallApiFunctionAndReturn(MacroAssembler* masm,
4938 Register function_address,
4939 ExternalReference thunk_ref,
4940 Operand thunk_last_arg, int stack_space,
4941 Operand* stack_space_operand,
4942 Operand return_value_operand,
4943 Operand* context_restore_operand) {
4944 Isolate* isolate = masm->isolate();
4945
4946 ExternalReference next_address =
4947 ExternalReference::handle_scope_next_address(isolate);
4948 ExternalReference limit_address =
4949 ExternalReference::handle_scope_limit_address(isolate);
4950 ExternalReference level_address =
4951 ExternalReference::handle_scope_level_address(isolate);
4952
4953 DCHECK(edx.is(function_address));
4954 // Allocate HandleScope in callee-save registers.
4955 __ mov(ebx, Operand::StaticVariable(next_address));
4956 __ mov(edi, Operand::StaticVariable(limit_address));
4957 __ add(Operand::StaticVariable(level_address), Immediate(1));
4958
4959 if (FLAG_log_timer_events) {
4960 FrameScope frame(masm, StackFrame::MANUAL);
4961 __ PushSafepointRegisters();
4962 __ PrepareCallCFunction(1, eax);
4963 __ mov(Operand(esp, 0),
4964 Immediate(ExternalReference::isolate_address(isolate)));
4965 __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
4966 1);
4967 __ PopSafepointRegisters();
4968 }
4969
4970
4971 Label profiler_disabled;
4972 Label end_profiler_check;
4973 __ mov(eax, Immediate(ExternalReference::is_profiling_address(isolate)));
Ben Murdochda12d292016-06-02 14:46:10 +01004974 __ cmpb(Operand(eax, 0), Immediate(0));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004975 __ j(zero, &profiler_disabled);
4976
4977 // Additional parameter is the address of the actual getter function.
4978 __ mov(thunk_last_arg, function_address);
4979 // Call the api function.
4980 __ mov(eax, Immediate(thunk_ref));
4981 __ call(eax);
4982 __ jmp(&end_profiler_check);
4983
4984 __ bind(&profiler_disabled);
4985 // Call the api function.
4986 __ call(function_address);
4987 __ bind(&end_profiler_check);
4988
4989 if (FLAG_log_timer_events) {
4990 FrameScope frame(masm, StackFrame::MANUAL);
4991 __ PushSafepointRegisters();
4992 __ PrepareCallCFunction(1, eax);
4993 __ mov(Operand(esp, 0),
4994 Immediate(ExternalReference::isolate_address(isolate)));
4995 __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
4996 1);
4997 __ PopSafepointRegisters();
4998 }
4999
5000 Label prologue;
5001 // Load the value from ReturnValue
5002 __ mov(eax, return_value_operand);
5003
5004 Label promote_scheduled_exception;
5005 Label delete_allocated_handles;
5006 Label leave_exit_frame;
5007
5008 __ bind(&prologue);
5009 // No more valid handles (the result handle was the last one). Restore
5010 // previous handle scope.
5011 __ mov(Operand::StaticVariable(next_address), ebx);
5012 __ sub(Operand::StaticVariable(level_address), Immediate(1));
5013 __ Assert(above_equal, kInvalidHandleScopeLevel);
5014 __ cmp(edi, Operand::StaticVariable(limit_address));
5015 __ j(not_equal, &delete_allocated_handles);
5016
5017 // Leave the API exit frame.
5018 __ bind(&leave_exit_frame);
5019 bool restore_context = context_restore_operand != NULL;
5020 if (restore_context) {
5021 __ mov(esi, *context_restore_operand);
5022 }
5023 if (stack_space_operand != nullptr) {
5024 __ mov(ebx, *stack_space_operand);
5025 }
5026 __ LeaveApiExitFrame(!restore_context);
5027
5028 // Check if the function scheduled an exception.
5029 ExternalReference scheduled_exception_address =
5030 ExternalReference::scheduled_exception_address(isolate);
5031 __ cmp(Operand::StaticVariable(scheduled_exception_address),
5032 Immediate(isolate->factory()->the_hole_value()));
5033 __ j(not_equal, &promote_scheduled_exception);
5034
5035#if DEBUG
5036 // Check if the function returned a valid JavaScript value.
5037 Label ok;
5038 Register return_value = eax;
5039 Register map = ecx;
5040
5041 __ JumpIfSmi(return_value, &ok, Label::kNear);
5042 __ mov(map, FieldOperand(return_value, HeapObject::kMapOffset));
5043
5044 __ CmpInstanceType(map, LAST_NAME_TYPE);
5045 __ j(below_equal, &ok, Label::kNear);
5046
5047 __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
5048 __ j(above_equal, &ok, Label::kNear);
5049
5050 __ cmp(map, isolate->factory()->heap_number_map());
5051 __ j(equal, &ok, Label::kNear);
5052
5053 __ cmp(return_value, isolate->factory()->undefined_value());
5054 __ j(equal, &ok, Label::kNear);
5055
5056 __ cmp(return_value, isolate->factory()->true_value());
5057 __ j(equal, &ok, Label::kNear);
5058
5059 __ cmp(return_value, isolate->factory()->false_value());
5060 __ j(equal, &ok, Label::kNear);
5061
5062 __ cmp(return_value, isolate->factory()->null_value());
5063 __ j(equal, &ok, Label::kNear);
5064
5065 __ Abort(kAPICallReturnedInvalidObject);
5066
5067 __ bind(&ok);
5068#endif
5069
5070 if (stack_space_operand != nullptr) {
5071 DCHECK_EQ(0, stack_space);
5072 __ pop(ecx);
5073 __ add(esp, ebx);
5074 __ jmp(ecx);
5075 } else {
5076 __ ret(stack_space * kPointerSize);
5077 }
5078
5079 // Re-throw by promoting a scheduled exception.
5080 __ bind(&promote_scheduled_exception);
5081 __ TailCallRuntime(Runtime::kPromoteScheduledException);
5082
5083 // HandleScope limit has changed. Delete allocated extensions.
5084 ExternalReference delete_extensions =
5085 ExternalReference::delete_handle_scope_extensions(isolate);
5086 __ bind(&delete_allocated_handles);
5087 __ mov(Operand::StaticVariable(limit_address), edi);
5088 __ mov(edi, eax);
5089 __ mov(Operand(esp, 0),
5090 Immediate(ExternalReference::isolate_address(isolate)));
5091 __ mov(eax, Immediate(delete_extensions));
5092 __ call(eax);
5093 __ mov(eax, edi);
5094 __ jmp(&leave_exit_frame);
5095}
5096
Ben Murdochda12d292016-06-02 14:46:10 +01005097void CallApiCallbackStub::Generate(MacroAssembler* masm) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005098 // ----------- S t a t e -------------
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005099 // -- edi : callee
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005100 // -- ebx : call_data
5101 // -- ecx : holder
5102 // -- edx : api_function_address
5103 // -- esi : context
5104 // --
5105 // -- esp[0] : return address
5106 // -- esp[4] : last argument
5107 // -- ...
5108 // -- esp[argc * 4] : first argument
5109 // -- esp[(argc + 1) * 4] : receiver
5110 // -----------------------------------
5111
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005112 Register callee = edi;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005113 Register call_data = ebx;
5114 Register holder = ecx;
5115 Register api_function_address = edx;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005116 Register context = esi;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005117 Register return_address = eax;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005118
5119 typedef FunctionCallbackArguments FCA;
5120
5121 STATIC_ASSERT(FCA::kContextSaveIndex == 6);
5122 STATIC_ASSERT(FCA::kCalleeIndex == 5);
5123 STATIC_ASSERT(FCA::kDataIndex == 4);
5124 STATIC_ASSERT(FCA::kReturnValueOffset == 3);
5125 STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
5126 STATIC_ASSERT(FCA::kIsolateIndex == 1);
5127 STATIC_ASSERT(FCA::kHolderIndex == 0);
Ben Murdochc5610432016-08-08 18:44:38 +01005128 STATIC_ASSERT(FCA::kNewTargetIndex == 7);
5129 STATIC_ASSERT(FCA::kArgsLength == 8);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005130
Ben Murdochda12d292016-06-02 14:46:10 +01005131 __ pop(return_address);
Ben Murdochc5610432016-08-08 18:44:38 +01005132
5133 // new target
5134 __ PushRoot(Heap::kUndefinedValueRootIndex);
5135
Ben Murdochda12d292016-06-02 14:46:10 +01005136 // context save.
5137 __ push(context);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005138
5139 // callee
5140 __ push(callee);
5141
5142 // call data
5143 __ push(call_data);
5144
5145 Register scratch = call_data;
Ben Murdochda12d292016-06-02 14:46:10 +01005146 if (!call_data_undefined()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005147 // return value
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005148 __ push(Immediate(masm->isolate()->factory()->undefined_value()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005149 // return value default
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005150 __ push(Immediate(masm->isolate()->factory()->undefined_value()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005151 } else {
5152 // return value
5153 __ push(scratch);
5154 // return value default
5155 __ push(scratch);
5156 }
5157 // isolate
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005158 __ push(Immediate(reinterpret_cast<int>(masm->isolate())));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005159 // holder
5160 __ push(holder);
5161
5162 __ mov(scratch, esp);
5163
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005164 // push return address
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005165 __ push(return_address);
5166
Ben Murdochda12d292016-06-02 14:46:10 +01005167 if (!is_lazy()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01005168 // load context from callee
5169 __ mov(context, FieldOperand(callee, JSFunction::kContextOffset));
5170 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005171
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005172  // The API function gets a reference to the v8::Arguments. If the CPU profiler
5173  // is enabled, a wrapper function will be called instead and we need to pass the
5174  // address of the callback as an additional parameter, so always allocate
5175  // space for it.
5176 const int kApiArgc = 1 + 1;
5177
5178 // Allocate the v8::Arguments structure in the arguments' space since
5179 // it's not controlled by GC.
Ben Murdochc5610432016-08-08 18:44:38 +01005180 const int kApiStackSpace = 3;
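  // These three words hold the FunctionCallbackInfo fields filled in below:
  // implicit_args_, values_ and length_.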
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005181
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005182 PrepareCallApiFunction(masm, kApiArgc + kApiStackSpace);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005183
5184 // FunctionCallbackInfo::implicit_args_.
5185 __ mov(ApiParameterOperand(2), scratch);
Ben Murdochda12d292016-06-02 14:46:10 +01005186 __ add(scratch, Immediate((argc() + FCA::kArgsLength - 1) * kPointerSize));
5187 // FunctionCallbackInfo::values_.
5188 __ mov(ApiParameterOperand(3), scratch);
5189 // FunctionCallbackInfo::length_.
5190 __ Move(ApiParameterOperand(4), Immediate(argc()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005191
5192 // v8::InvocationCallback's argument.
5193 __ lea(scratch, ApiParameterOperand(2));
5194 __ mov(ApiParameterOperand(0), scratch);
5195
5196 ExternalReference thunk_ref =
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005197 ExternalReference::invoke_function_callback(masm->isolate());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005198
5199 Operand context_restore_operand(ebp,
5200 (2 + FCA::kContextSaveIndex) * kPointerSize);
5201  // Stores return the first JS argument (the value that was stored).
5202 int return_value_offset = 0;
Ben Murdochda12d292016-06-02 14:46:10 +01005203 if (is_store()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005204 return_value_offset = 2 + FCA::kArgsLength;
5205 } else {
5206 return_value_offset = 2 + FCA::kReturnValueOffset;
5207 }
5208 Operand return_value_operand(ebp, return_value_offset * kPointerSize);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005209 int stack_space = 0;
Ben Murdochc5610432016-08-08 18:44:38 +01005210 Operand length_operand = ApiParameterOperand(4);
5211 Operand* stack_space_operand = &length_operand;
Ben Murdochda12d292016-06-02 14:46:10 +01005212 stack_space = argc() + FCA::kArgsLength + 1;
5213 stack_space_operand = nullptr;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005214 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
5215 ApiParameterOperand(1), stack_space,
5216 stack_space_operand, return_value_operand,
5217 &context_restore_operand);
5218}
5219
5220
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005221void CallApiGetterStub::Generate(MacroAssembler* masm) {
Ben Murdochc5610432016-08-08 18:44:38 +01005222  // Build the v8::PropertyCallbackInfo::args_ array on the stack and push the
5223  // property name below the exit frame to make the GC aware of them.
5224 STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
5225 STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
5226 STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
5227 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
5228 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
5229 STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
5230 STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
5231 STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);
5232
5233 Register receiver = ApiGetterDescriptor::ReceiverRegister();
5234 Register holder = ApiGetterDescriptor::HolderRegister();
5235 Register callback = ApiGetterDescriptor::CallbackRegister();
5236 Register scratch = ebx;
5237 DCHECK(!AreAliased(receiver, holder, callback, scratch));
5238
5239 __ pop(scratch); // Pop return address to extend the frame.
5240 __ push(receiver);
5241 __ push(FieldOperand(callback, AccessorInfo::kDataOffset));
5242 __ PushRoot(Heap::kUndefinedValueRootIndex); // ReturnValue
5243 // ReturnValue default value
5244 __ PushRoot(Heap::kUndefinedValueRootIndex);
5245 __ push(Immediate(ExternalReference::isolate_address(isolate())));
5246 __ push(holder);
5247 __ push(Immediate(Smi::FromInt(0))); // should_throw_on_error -> false
5248 __ push(FieldOperand(callback, AccessorInfo::kNameOffset));
5249 __ push(scratch); // Restore return address.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005250
Ben Murdoch097c5b22016-05-18 11:27:45 +01005251 // v8::PropertyCallbackInfo::args_ array and name handle.
5252 const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;
5253
5254 // Allocate v8::PropertyCallbackInfo object, arguments for callback and
5255 // space for optional callback address parameter (in case CPU profiler is
5256 // active) in non-GCed stack space.
5257 const int kApiArgc = 3 + 1;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005258
Ben Murdoch097c5b22016-05-18 11:27:45 +01005259 // Load address of v8::PropertyAccessorInfo::args_ array.
5260 __ lea(scratch, Operand(esp, 2 * kPointerSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005261
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005262 PrepareCallApiFunction(masm, kApiArgc);
Ben Murdoch097c5b22016-05-18 11:27:45 +01005263 // Create v8::PropertyCallbackInfo object on the stack and initialize
5264  // its args_ field.
5265 Operand info_object = ApiParameterOperand(3);
5266 __ mov(info_object, scratch);
5267
Ben Murdochc5610432016-08-08 18:44:38 +01005268 // Name as handle.
Ben Murdoch097c5b22016-05-18 11:27:45 +01005269 __ sub(scratch, Immediate(kPointerSize));
Ben Murdochc5610432016-08-08 18:44:38 +01005270 __ mov(ApiParameterOperand(0), scratch);
5271 // Arguments pointer.
Ben Murdoch097c5b22016-05-18 11:27:45 +01005272 __ lea(scratch, info_object);
Ben Murdochc5610432016-08-08 18:44:38 +01005273 __ mov(ApiParameterOperand(1), scratch);
Ben Murdoch097c5b22016-05-18 11:27:45 +01005274 // Reserve space for optional callback address parameter.
5275 Operand thunk_last_arg = ApiParameterOperand(2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005276
5277 ExternalReference thunk_ref =
5278 ExternalReference::invoke_accessor_getter_callback(isolate());
5279
Ben Murdochc5610432016-08-08 18:44:38 +01005280 __ mov(scratch, FieldOperand(callback, AccessorInfo::kJsGetterOffset));
5281 Register function_address = edx;
5282 __ mov(function_address,
5283 FieldOperand(scratch, Foreign::kForeignAddressOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01005284 // +3 is to skip prolog, return address and name handle.
5285 Operand return_value_operand(
5286 ebp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
Ben Murdochc5610432016-08-08 18:44:38 +01005287 CallApiFunctionAndReturn(masm, function_address, thunk_ref, thunk_last_arg,
5288 kStackUnwindSpace, nullptr, return_value_operand,
5289 NULL);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005290}
5291
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005292#undef __
5293
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005294} // namespace internal
5295} // namespace v8
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005296
5297#endif // V8_TARGET_ARCH_X87