1// Copyright 2016 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/code-stub-assembler.h"
6#include "src/code-factory.h"
7
8namespace v8 {
9namespace internal {
10
11using compiler::Node;
12
13CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone,
14 const CallInterfaceDescriptor& descriptor,
15 Code::Flags flags, const char* name,
16 size_t result_size)
17 : compiler::CodeAssembler(isolate, zone, descriptor, flags, name,
18 result_size) {}
19
20CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone,
21 int parameter_count, Code::Flags flags,
22 const char* name)
23 : compiler::CodeAssembler(isolate, zone, parameter_count, flags, name) {}
24
25Node* CodeStubAssembler::BooleanMapConstant() {
26 return HeapConstant(isolate()->factory()->boolean_map());
27}
28
29Node* CodeStubAssembler::EmptyStringConstant() {
30 return LoadRoot(Heap::kempty_stringRootIndex);
31}
32
33Node* CodeStubAssembler::HeapNumberMapConstant() {
34 return HeapConstant(isolate()->factory()->heap_number_map());
35}
36
37Node* CodeStubAssembler::NoContextConstant() {
38 return SmiConstant(Smi::FromInt(0));
39}
40
41Node* CodeStubAssembler::NullConstant() {
42 return LoadRoot(Heap::kNullValueRootIndex);
43}
44
45Node* CodeStubAssembler::UndefinedConstant() {
46 return LoadRoot(Heap::kUndefinedValueRootIndex);
47}
48
49Node* CodeStubAssembler::StaleRegisterConstant() {
50 return LoadRoot(Heap::kStaleRegisterRootIndex);
51}
52
53Node* CodeStubAssembler::Float64Round(Node* x) {
54 Node* one = Float64Constant(1.0);
55 Node* one_half = Float64Constant(0.5);
56
57 Variable var_x(this, MachineRepresentation::kFloat64);
58 Label return_x(this);
59
60 // Round up {x} towards Infinity.
61 var_x.Bind(Float64Ceil(x));
62
63 GotoIf(Float64LessThanOrEqual(Float64Sub(var_x.value(), one_half), x),
64 &return_x);
65 var_x.Bind(Float64Sub(var_x.value(), one));
66 Goto(&return_x);
67
68 Bind(&return_x);
69 return var_x.value();
70}
71
72Node* CodeStubAssembler::Float64Ceil(Node* x) {
73 if (IsFloat64RoundUpSupported()) {
74 return Float64RoundUp(x);
75 }
76
77 Node* one = Float64Constant(1.0);
78 Node* zero = Float64Constant(0.0);
79 Node* two_52 = Float64Constant(4503599627370496.0E0);
80 Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);
81
82 Variable var_x(this, MachineRepresentation::kFloat64);
83 Label return_x(this), return_minus_x(this);
84 var_x.Bind(x);
85
86 // Check if {x} is greater than zero.
87 Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
88 Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
89 &if_xnotgreaterthanzero);
90
91 Bind(&if_xgreaterthanzero);
92 {
93 // Just return {x} unless it's in the range ]0,2^52[.
94 GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);
95
96 // Round positive {x} towards Infinity.
97 var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
98 GotoUnless(Float64LessThan(var_x.value(), x), &return_x);
99 var_x.Bind(Float64Add(var_x.value(), one));
100 Goto(&return_x);
101 }
102
103 Bind(&if_xnotgreaterthanzero);
104 {
105 // Just return {x} unless it's in the range ]-2^52,0[
106 GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
107 GotoUnless(Float64LessThan(x, zero), &return_x);
108
109 // Round negated {x} towards Infinity and return the result negated.
110 Node* minus_x = Float64Neg(x);
111 var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
112 GotoUnless(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
113 var_x.Bind(Float64Sub(var_x.value(), one));
114 Goto(&return_minus_x);
115 }
116
117 Bind(&return_minus_x);
118 var_x.Bind(Float64Neg(var_x.value()));
119 Goto(&return_x);
120
121 Bind(&return_x);
122 return var_x.value();
123}
124
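// The 2^52 trick used in Float64Ceil above (and in Float64Floor/Float64Trunc
// below) relies on the fact that doubles at or above 2^52 have no fractional
// bits: for 0 < x < 2^52 the expression (2^52 + x) - 2^52 therefore rounds
// {x} to an integer under the default round-to-nearest mode. An illustrative
// worked example:
//
//   x                 = 3.3
//   2^52 + x          = 4503599627370499.0   (the .3 cannot be represented)
//   (2^52 + x) - 2^52 = 3.0
//
// Round-to-nearest may round either down (3.3 -> 3.0) or up (3.7 -> 4.0), so
// the code compares the result against {x} and adjusts by 1.0 to turn it into
// a true ceiling (or, for the negated case, a floor).
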
125Node* CodeStubAssembler::Float64Floor(Node* x) {
126 if (IsFloat64RoundDownSupported()) {
127 return Float64RoundDown(x);
128 }
129
130 Node* one = Float64Constant(1.0);
131 Node* zero = Float64Constant(0.0);
132 Node* two_52 = Float64Constant(4503599627370496.0E0);
133 Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);
134
135 Variable var_x(this, MachineRepresentation::kFloat64);
136 Label return_x(this), return_minus_x(this);
137 var_x.Bind(x);
138
139 // Check if {x} is greater than zero.
140 Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
141 Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
142 &if_xnotgreaterthanzero);
143
144 Bind(&if_xgreaterthanzero);
145 {
146 // Just return {x} unless it's in the range ]0,2^52[.
147 GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);
148
149 // Round positive {x} towards -Infinity.
150 var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
151 GotoUnless(Float64GreaterThan(var_x.value(), x), &return_x);
152 var_x.Bind(Float64Sub(var_x.value(), one));
153 Goto(&return_x);
154 }
155
156 Bind(&if_xnotgreaterthanzero);
157 {
158 // Just return {x} unless it's in the range ]-2^52,0[
159 GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
160 GotoUnless(Float64LessThan(x, zero), &return_x);
161
162 // Round negated {x} towards -Infinity and return the result negated.
163 Node* minus_x = Float64Neg(x);
164 var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
165 GotoUnless(Float64LessThan(var_x.value(), minus_x), &return_minus_x);
166 var_x.Bind(Float64Add(var_x.value(), one));
167 Goto(&return_minus_x);
168 }
169
170 Bind(&return_minus_x);
171 var_x.Bind(Float64Neg(var_x.value()));
172 Goto(&return_x);
173
174 Bind(&return_x);
175 return var_x.value();
176}
177
178Node* CodeStubAssembler::Float64Trunc(Node* x) {
179 if (IsFloat64RoundTruncateSupported()) {
180 return Float64RoundTruncate(x);
181 }
182
183 Node* one = Float64Constant(1.0);
184 Node* zero = Float64Constant(0.0);
185 Node* two_52 = Float64Constant(4503599627370496.0E0);
186 Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);
187
188 Variable var_x(this, MachineRepresentation::kFloat64);
189 Label return_x(this), return_minus_x(this);
190 var_x.Bind(x);
191
192 // Check if {x} is greater than 0.
193 Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
194 Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
195 &if_xnotgreaterthanzero);
196
197 Bind(&if_xgreaterthanzero);
198 {
199 if (IsFloat64RoundDownSupported()) {
200 var_x.Bind(Float64RoundDown(x));
201 } else {
202 // Just return {x} unless it's in the range ]0,2^52[.
203 GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);
204
205 // Round positive {x} towards -Infinity.
206 var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
207 GotoUnless(Float64GreaterThan(var_x.value(), x), &return_x);
208 var_x.Bind(Float64Sub(var_x.value(), one));
209 }
210 Goto(&return_x);
211 }
212
213 Bind(&if_xnotgreaterthanzero);
214 {
215 if (IsFloat64RoundUpSupported()) {
216 var_x.Bind(Float64RoundUp(x));
217 Goto(&return_x);
218 } else {
219 // Just return {x} unless it's in the range ]-2^52,0[.
220 GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
221 GotoUnless(Float64LessThan(x, zero), &return_x);
222
223 // Round negated {x} towards -Infinity and return the result negated.
224 Node* minus_x = Float64Neg(x);
225 var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
226 GotoUnless(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
227 var_x.Bind(Float64Sub(var_x.value(), one));
228 Goto(&return_minus_x);
229 }
230 }
231
232 Bind(&return_minus_x);
233 var_x.Bind(Float64Neg(var_x.value()));
234 Goto(&return_x);
235
236 Bind(&return_x);
237 return var_x.value();
238}
239
240Node* CodeStubAssembler::SmiFromWord32(Node* value) {
241 value = ChangeInt32ToIntPtr(value);
242 return WordShl(value, SmiShiftBitsConstant());
243}
244
245Node* CodeStubAssembler::SmiTag(Node* value) {
246 int32_t constant_value;
247 if (ToInt32Constant(value, constant_value) && Smi::IsValid(constant_value)) {
248 return SmiConstant(Smi::FromInt(constant_value));
249 }
250 return WordShl(value, SmiShiftBitsConstant());
251}
252
253Node* CodeStubAssembler::SmiUntag(Node* value) {
254 return WordSar(value, SmiShiftBitsConstant());
255}
256
257Node* CodeStubAssembler::SmiToWord32(Node* value) {
258 Node* result = WordSar(value, SmiShiftBitsConstant());
259 if (Is64()) {
260 result = TruncateInt64ToInt32(result);
261 }
262 return result;
263}
264
265Node* CodeStubAssembler::SmiToFloat64(Node* value) {
266 return ChangeInt32ToFloat64(SmiToWord32(value));
267}
268
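// Smi (small integer) tagging in a nutshell: SmiTag shifts the untagged value
// left by SmiShiftBitsConstant() so that the tag bits at the low end of the
// word become zero, and SmiUntag is the arithmetic shift back. On a typical
// 64-bit configuration the payload ends up in the upper 32 bits; on 32-bit
// configurations the shift is one. A hypothetical 32-bit example (shift == 1):
//
//   untagged value: 5   -> binary ...0101
//   SmiTag(5):      10  -> binary ...1010   (low tag bit is 0)
//   SmiUntag(10):   5   -> arithmetic shift right restores the value
//
// Because the tag bits are zero, SmiAdd/SmiSub and the Smi comparisons below
// can operate directly on the tagged words without untagging first.
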
269Node* CodeStubAssembler::SmiAdd(Node* a, Node* b) { return IntPtrAdd(a, b); }
270
271Node* CodeStubAssembler::SmiAddWithOverflow(Node* a, Node* b) {
272 return IntPtrAddWithOverflow(a, b);
273}
274
275Node* CodeStubAssembler::SmiSub(Node* a, Node* b) { return IntPtrSub(a, b); }
276
277Node* CodeStubAssembler::SmiSubWithOverflow(Node* a, Node* b) {
278 return IntPtrSubWithOverflow(a, b);
279}
280
281Node* CodeStubAssembler::SmiEqual(Node* a, Node* b) { return WordEqual(a, b); }
282
283Node* CodeStubAssembler::SmiAboveOrEqual(Node* a, Node* b) {
284 return UintPtrGreaterThanOrEqual(a, b);
285}
286
287Node* CodeStubAssembler::SmiLessThan(Node* a, Node* b) {
288 return IntPtrLessThan(a, b);
289}
290
291Node* CodeStubAssembler::SmiLessThanOrEqual(Node* a, Node* b) {
292 return IntPtrLessThanOrEqual(a, b);
293}
294
295Node* CodeStubAssembler::SmiMin(Node* a, Node* b) {
296 // TODO(bmeurer): Consider using Select once available.
297 Variable min(this, MachineRepresentation::kTagged);
298 Label if_a(this), if_b(this), join(this);
299 BranchIfSmiLessThan(a, b, &if_a, &if_b);
300 Bind(&if_a);
301 min.Bind(a);
302 Goto(&join);
303 Bind(&if_b);
304 min.Bind(b);
305 Goto(&join);
306 Bind(&join);
307 return min.value();
308}
309
310Node* CodeStubAssembler::WordIsSmi(Node* a) {
311 return WordEqual(WordAnd(a, IntPtrConstant(kSmiTagMask)), IntPtrConstant(0));
312}
313
314Node* CodeStubAssembler::WordIsPositiveSmi(Node* a) {
315 return WordEqual(WordAnd(a, IntPtrConstant(kSmiTagMask | kSmiSignMask)),
316 IntPtrConstant(0));
317}
318
319Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
320 AllocationFlags flags,
321 Node* top_address,
322 Node* limit_address) {
323 Node* top = Load(MachineType::Pointer(), top_address);
324 Node* limit = Load(MachineType::Pointer(), limit_address);
325
326 // If there's not enough space, call the runtime.
327 Variable result(this, MachineRepresentation::kTagged);
328 Label runtime_call(this, Label::kDeferred), no_runtime_call(this);
329 Label merge_runtime(this, &result);
330
331 Node* new_top = IntPtrAdd(top, size_in_bytes);
332 Branch(UintPtrGreaterThanOrEqual(new_top, limit), &runtime_call,
333 &no_runtime_call);
334
335 Bind(&runtime_call);
336 // AllocateInTargetSpace does not use the context.
337 Node* context = SmiConstant(Smi::FromInt(0));
338
339 Node* runtime_result;
340 if (flags & kPretenured) {
341 Node* runtime_flags = SmiConstant(
342 Smi::FromInt(AllocateDoubleAlignFlag::encode(false) |
343 AllocateTargetSpace::encode(AllocationSpace::OLD_SPACE)));
344 runtime_result = CallRuntime(Runtime::kAllocateInTargetSpace, context,
345 SmiTag(size_in_bytes), runtime_flags);
346 } else {
347 runtime_result = CallRuntime(Runtime::kAllocateInNewSpace, context,
348 SmiTag(size_in_bytes));
349 }
350 result.Bind(runtime_result);
351 Goto(&merge_runtime);
352
353 // When there is enough space, return `top' and bump it up.
354 Bind(&no_runtime_call);
355 Node* no_runtime_result = top;
356 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
357 new_top);
358 no_runtime_result = BitcastWordToTagged(
359 IntPtrAdd(no_runtime_result, IntPtrConstant(kHeapObjectTag)));
360 result.Bind(no_runtime_result);
361 Goto(&merge_runtime);
362
363 Bind(&merge_runtime);
364 return result.value();
365}
366
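// AllocateRawUnaligned above is a bump-pointer allocation with a runtime
// fallback. In plain terms the fast path is:
//
//   result  = *top_address                      // current allocation top
//   new_top = result + size_in_bytes
//   if (new_top >= *limit_address) -> call the runtime instead
//   *top_address = new_top                      // bump the top
//   return result + kHeapObjectTag              // tag the raw address
//
// Callers such as AllocateHeapNumber further down then store a map and the
// object's fields into the freshly reserved memory.
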
367Node* CodeStubAssembler::AllocateRawAligned(Node* size_in_bytes,
368 AllocationFlags flags,
369 Node* top_address,
370 Node* limit_address) {
371 Node* top = Load(MachineType::Pointer(), top_address);
372 Node* limit = Load(MachineType::Pointer(), limit_address);
373 Variable adjusted_size(this, MachineType::PointerRepresentation());
374 adjusted_size.Bind(size_in_bytes);
375 if (flags & kDoubleAlignment) {
376 // TODO(epertoso): Simd128 alignment.
377 Label aligned(this), not_aligned(this), merge(this, &adjusted_size);
378 Branch(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)), &not_aligned,
379 &aligned);
380
381 Bind(&not_aligned);
382 Node* not_aligned_size =
383 IntPtrAdd(size_in_bytes, IntPtrConstant(kPointerSize));
384 adjusted_size.Bind(not_aligned_size);
385 Goto(&merge);
386
387 Bind(&aligned);
388 Goto(&merge);
389
390 Bind(&merge);
391 }
392
393 Variable address(this, MachineRepresentation::kTagged);
394 address.Bind(AllocateRawUnaligned(adjusted_size.value(), kNone, top, limit));
395
396 Label needs_filler(this), doesnt_need_filler(this),
397 merge_address(this, &address);
398 Branch(IntPtrEqual(adjusted_size.value(), size_in_bytes), &doesnt_need_filler,
399 &needs_filler);
400
401 Bind(&needs_filler);
402 // Store a filler and increase the address by kPointerSize.
403 // TODO(epertoso): this code assumes that we only align to kDoubleSize. Change
404 // it when Simd128 alignment is supported.
405 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top,
406 LoadRoot(Heap::kOnePointerFillerMapRootIndex));
407 address.Bind(BitcastWordToTagged(
408 IntPtrAdd(address.value(), IntPtrConstant(kPointerSize))));
409 Goto(&merge_address);
410
411 Bind(&doesnt_need_filler);
412 Goto(&merge_address);
413
414 Bind(&merge_address);
415 // Update the top.
416 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
417 IntPtrAdd(top, adjusted_size.value()));
418 return address.value();
419}
420
421Node* CodeStubAssembler::Allocate(Node* size_in_bytes, AllocationFlags flags) {
422 bool const new_space = !(flags & kPretenured);
423 Node* top_address = ExternalConstant(
424 new_space
425 ? ExternalReference::new_space_allocation_top_address(isolate())
426 : ExternalReference::old_space_allocation_top_address(isolate()));
427 Node* limit_address = ExternalConstant(
428 new_space
429 ? ExternalReference::new_space_allocation_limit_address(isolate())
430 : ExternalReference::old_space_allocation_limit_address(isolate()));
431
432#ifdef V8_HOST_ARCH_32_BIT
433 if (flags & kDoubleAlignment) {
434 return AllocateRawAligned(size_in_bytes, flags, top_address, limit_address);
435 }
436#endif
437
438 return AllocateRawUnaligned(size_in_bytes, flags, top_address, limit_address);
439}
440
441Node* CodeStubAssembler::Allocate(int size_in_bytes, AllocationFlags flags) {
442 return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
443}
444
445Node* CodeStubAssembler::InnerAllocate(Node* previous, Node* offset) {
446 return BitcastWordToTagged(IntPtrAdd(previous, offset));
447}
448
449Node* CodeStubAssembler::InnerAllocate(Node* previous, int offset) {
450 return InnerAllocate(previous, IntPtrConstant(offset));
451}
452
453Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset,
454 MachineType rep) {
455 return Load(rep, buffer, IntPtrConstant(offset));
456}
457
458Node* CodeStubAssembler::LoadObjectField(Node* object, int offset,
459 MachineType rep) {
460 return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag));
461}
462
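// Tagged HeapObject pointers carry kHeapObjectTag in their low bits, so field
// accesses subtract the tag from the field offset rather than untagging the
// pointer itself. For example, LoadHeapNumberValue below reads from
//
//   object + (HeapNumber::kValueOffset - kHeapObjectTag)
//
// which is exactly the untagged object address plus the offset of the value
// field.
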
463Node* CodeStubAssembler::LoadHeapNumberValue(Node* object) {
464 return Load(MachineType::Float64(), object,
465 IntPtrConstant(HeapNumber::kValueOffset - kHeapObjectTag));
466}
467
468Node* CodeStubAssembler::LoadMap(Node* object) {
469 return LoadObjectField(object, HeapObject::kMapOffset);
470}
471
472Node* CodeStubAssembler::LoadInstanceType(Node* object) {
473 return LoadMapInstanceType(LoadMap(object));
474}
475
476Node* CodeStubAssembler::LoadElements(Node* object) {
477 return LoadObjectField(object, JSObject::kElementsOffset);
478}
479
480Node* CodeStubAssembler::LoadFixedArrayBaseLength(Node* array) {
481 return LoadObjectField(array, FixedArrayBase::kLengthOffset);
482}
483
484Node* CodeStubAssembler::LoadMapBitField(Node* map) {
485 return Load(MachineType::Uint8(), map,
486 IntPtrConstant(Map::kBitFieldOffset - kHeapObjectTag));
487}
488
489Node* CodeStubAssembler::LoadMapBitField2(Node* map) {
490 return Load(MachineType::Uint8(), map,
491 IntPtrConstant(Map::kBitField2Offset - kHeapObjectTag));
492}
493
494Node* CodeStubAssembler::LoadMapBitField3(Node* map) {
495 return Load(MachineType::Uint32(), map,
496 IntPtrConstant(Map::kBitField3Offset - kHeapObjectTag));
497}
498
499Node* CodeStubAssembler::LoadMapInstanceType(Node* map) {
500 return Load(MachineType::Uint8(), map,
501 IntPtrConstant(Map::kInstanceTypeOffset - kHeapObjectTag));
502}
503
504Node* CodeStubAssembler::LoadMapDescriptors(Node* map) {
505 return LoadObjectField(map, Map::kDescriptorsOffset);
506}
507
508Node* CodeStubAssembler::LoadMapPrototype(Node* map) {
509 return LoadObjectField(map, Map::kPrototypeOffset);
510}
511
512Node* CodeStubAssembler::LoadNameHash(Node* name) {
513 return Load(MachineType::Uint32(), name,
514 IntPtrConstant(Name::kHashFieldOffset - kHeapObjectTag));
515}
516
517Node* CodeStubAssembler::AllocateUninitializedFixedArray(Node* length) {
518 Node* header_size = IntPtrConstant(FixedArray::kHeaderSize);
519 Node* data_size = WordShl(length, IntPtrConstant(kPointerSizeLog2));
520 Node* total_size = IntPtrAdd(data_size, header_size);
521
522 Node* result = Allocate(total_size, kNone);
523 StoreMapNoWriteBarrier(result, LoadRoot(Heap::kFixedArrayMapRootIndex));
524 StoreObjectFieldNoWriteBarrier(result, FixedArray::kLengthOffset,
525 SmiTag(length));
526
527 return result;
528}
529
530Node* CodeStubAssembler::LoadFixedArrayElement(Node* object, Node* index_node,
531 int additional_offset,
532 ParameterMode parameter_mode) {
533 int32_t header_size =
534 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag;
535 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS,
536 parameter_mode, header_size);
537 return Load(MachineType::AnyTagged(), object, offset);
538}
539
540Node* CodeStubAssembler::LoadMapInstanceSize(Node* map) {
541 return Load(MachineType::Uint8(), map,
542 IntPtrConstant(Map::kInstanceSizeOffset - kHeapObjectTag));
543}
544
545Node* CodeStubAssembler::LoadNativeContext(Node* context) {
546 return LoadFixedArrayElement(context,
547 Int32Constant(Context::NATIVE_CONTEXT_INDEX));
548}
549
550Node* CodeStubAssembler::LoadJSArrayElementsMap(ElementsKind kind,
551 Node* native_context) {
552 return LoadFixedArrayElement(native_context,
553 Int32Constant(Context::ArrayMapIndex(kind)));
554}
555
556Node* CodeStubAssembler::StoreHeapNumberValue(Node* object, Node* value) {
557 return StoreNoWriteBarrier(
558 MachineRepresentation::kFloat64, object,
559 IntPtrConstant(HeapNumber::kValueOffset - kHeapObjectTag), value);
560}
561
562Node* CodeStubAssembler::StoreObjectField(
563 Node* object, int offset, Node* value) {
564 return Store(MachineRepresentation::kTagged, object,
565 IntPtrConstant(offset - kHeapObjectTag), value);
566}
567
568Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
569 Node* object, int offset, Node* value, MachineRepresentation rep) {
570 return StoreNoWriteBarrier(rep, object,
571 IntPtrConstant(offset - kHeapObjectTag), value);
572}
573
574Node* CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) {
575 return StoreNoWriteBarrier(
576 MachineRepresentation::kTagged, object,
577 IntPtrConstant(HeapNumber::kMapOffset - kHeapObjectTag), map);
578}
579
580Node* CodeStubAssembler::StoreFixedArrayElement(Node* object, Node* index_node,
581 Node* value,
582 WriteBarrierMode barrier_mode,
583 ParameterMode parameter_mode) {
584 DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
585 barrier_mode == UPDATE_WRITE_BARRIER);
586 Node* offset =
587 ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, parameter_mode,
588 FixedArray::kHeaderSize - kHeapObjectTag);
589 MachineRepresentation rep = MachineRepresentation::kTagged;
590 if (barrier_mode == SKIP_WRITE_BARRIER) {
591 return StoreNoWriteBarrier(rep, object, offset, value);
592 } else {
593 return Store(rep, object, offset, value);
594 }
595}
596
597Node* CodeStubAssembler::StoreFixedDoubleArrayElement(
598 Node* object, Node* index_node, Node* value, ParameterMode parameter_mode) {
599 Node* offset =
600 ElementOffsetFromIndex(index_node, FAST_DOUBLE_ELEMENTS, parameter_mode,
601 FixedArray::kHeaderSize - kHeapObjectTag);
602 MachineRepresentation rep = MachineRepresentation::kFloat64;
603 return StoreNoWriteBarrier(rep, object, offset, value);
604}
605
606Node* CodeStubAssembler::AllocateHeapNumber() {
607 Node* result = Allocate(HeapNumber::kSize, kNone);
608 StoreMapNoWriteBarrier(result, HeapNumberMapConstant());
609 return result;
610}
611
612Node* CodeStubAssembler::AllocateHeapNumberWithValue(Node* value) {
613 Node* result = AllocateHeapNumber();
614 StoreHeapNumberValue(result, value);
615 return result;
616}
617
618Node* CodeStubAssembler::AllocateSeqOneByteString(int length) {
619 Node* result = Allocate(SeqOneByteString::SizeFor(length));
620 StoreMapNoWriteBarrier(result, LoadRoot(Heap::kOneByteStringMapRootIndex));
621 StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
622 SmiConstant(Smi::FromInt(length)));
623 StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldSlot,
624 IntPtrConstant(String::kEmptyHashField));
625 return result;
626}
627
628Node* CodeStubAssembler::AllocateSeqTwoByteString(int length) {
629 Node* result = Allocate(SeqTwoByteString::SizeFor(length));
630 StoreMapNoWriteBarrier(result, LoadRoot(Heap::kStringMapRootIndex));
631 StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
632 SmiConstant(Smi::FromInt(length)));
633 StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldSlot,
634 IntPtrConstant(String::kEmptyHashField));
635 return result;
636}
637
638Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map,
639 Node* capacity_node, Node* length_node,
640 compiler::Node* allocation_site,
641 ParameterMode mode) {
642 bool is_double = IsFastDoubleElementsKind(kind);
643 int base_size = JSArray::kSize + FixedArray::kHeaderSize;
644 int elements_offset = JSArray::kSize;
645
646 if (allocation_site != nullptr) {
647 base_size += AllocationMemento::kSize;
648 elements_offset += AllocationMemento::kSize;
649 }
650
651 int32_t capacity;
652 bool constant_capacity = ToInt32Constant(capacity_node, capacity);
653 Node* total_size =
654 ElementOffsetFromIndex(capacity_node, kind, mode, base_size);
655
656 // Allocate both array and elements object, and initialize the JSArray.
657 Heap* heap = isolate()->heap();
658 Node* array = Allocate(total_size);
659 StoreMapNoWriteBarrier(array, array_map);
660 Node* empty_properties =
661 HeapConstant(Handle<HeapObject>(heap->empty_fixed_array()));
662 StoreObjectFieldNoWriteBarrier(array, JSArray::kPropertiesOffset,
663 empty_properties);
664 StoreObjectFieldNoWriteBarrier(
665 array, JSArray::kLengthOffset,
666 mode == SMI_PARAMETERS ? length_node : SmiTag(length_node));
667
668 if (allocation_site != nullptr) {
669 InitializeAllocationMemento(array, JSArray::kSize, allocation_site);
670 }
671
672 // Set up the elements object.
673 Node* elements = InnerAllocate(array, elements_offset);
674 StoreObjectFieldNoWriteBarrier(array, JSArray::kElementsOffset, elements);
675 Handle<Map> elements_map(is_double ? heap->fixed_double_array_map()
676 : heap->fixed_array_map());
677 StoreMapNoWriteBarrier(elements, HeapConstant(elements_map));
678 StoreObjectFieldNoWriteBarrier(
679 elements, FixedArray::kLengthOffset,
680 mode == SMI_PARAMETERS ? capacity_node : SmiTag(capacity_node));
681
682 int const first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
683 Node* hole = HeapConstant(Handle<HeapObject>(heap->the_hole_value()));
684 Node* double_hole =
685 Is64() ? Int64Constant(kHoleNanInt64) : Int32Constant(kHoleNanLower32);
686 DCHECK_EQ(kHoleNanLower32, kHoleNanUpper32);
687 if (constant_capacity && capacity <= kElementLoopUnrollThreshold) {
688 for (int i = 0; i < capacity; ++i) {
689 if (is_double) {
690 Node* offset = ElementOffsetFromIndex(Int32Constant(i), kind, mode,
691 first_element_offset);
692 // Don't use doubles to store the hole double, since manipulating the
693 // signaling NaN used for the hole in C++, e.g. with bit_cast, will
694 // change its value on ia32 (the x87 stack is used to return values
695 // and stores to the stack silently clear the signaling bit).
696 //
697 // TODO(danno): When we have a Float32/Float64 wrapper class that
698 // preserves double bits during manipulation, remove this code/change
699 // this to an indexed Float64 store.
700 if (Is64()) {
701 StoreNoWriteBarrier(MachineRepresentation::kWord64, elements, offset,
702 double_hole);
703 } else {
704 StoreNoWriteBarrier(MachineRepresentation::kWord32, elements, offset,
705 double_hole);
706 offset = ElementOffsetFromIndex(Int32Constant(i), kind, mode,
707 first_element_offset + kPointerSize);
708 StoreNoWriteBarrier(MachineRepresentation::kWord32, elements, offset,
709 double_hole);
710 }
711 } else {
712 StoreFixedArrayElement(elements, Int32Constant(i), hole,
713 SKIP_WRITE_BARRIER);
714 }
715 }
716 } else {
717 // TODO(danno): Add a loop for initialization
718 UNIMPLEMENTED();
719 }
720
721 return array;
722}
723
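// AllocateJSArray above reserves a single contiguous block and carves it up
// with InnerAllocate, so the resulting layout is:
//
//   [ JSArray header       : JSArray::kSize           ]
//   [ AllocationMemento    : AllocationMemento::kSize ]   (only if requested)
//   [ FixedArray header    : FixedArray::kHeaderSize  ]
//   [ capacity elements    : capacity * element size  ]
//
// Every element slot is pre-filled with the hole (or the hole NaN bit pattern
// for double arrays), which is what the unrolled store loop above does for
// small constant capacities.
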
724void CodeStubAssembler::InitializeAllocationMemento(
725 compiler::Node* base_allocation, int base_allocation_size,
726 compiler::Node* allocation_site) {
727 StoreObjectFieldNoWriteBarrier(
728 base_allocation, AllocationMemento::kMapOffset + base_allocation_size,
729 HeapConstant(Handle<Map>(isolate()->heap()->allocation_memento_map())));
730 StoreObjectFieldNoWriteBarrier(
731 base_allocation,
732 AllocationMemento::kAllocationSiteOffset + base_allocation_size,
733 allocation_site);
734 if (FLAG_allocation_site_pretenuring) {
735 Node* count = LoadObjectField(allocation_site,
736 AllocationSite::kPretenureCreateCountOffset);
737 Node* incremented_count = IntPtrAdd(count, SmiConstant(Smi::FromInt(1)));
738 StoreObjectFieldNoWriteBarrier(allocation_site,
739 AllocationSite::kPretenureCreateCountOffset,
740 incremented_count);
741 }
742}
743
744Node* CodeStubAssembler::TruncateTaggedToFloat64(Node* context, Node* value) {
745 // We might need to loop once due to ToNumber conversion.
746 Variable var_value(this, MachineRepresentation::kTagged),
747 var_result(this, MachineRepresentation::kFloat64);
748 Label loop(this, &var_value), done_loop(this, &var_result);
749 var_value.Bind(value);
750 Goto(&loop);
751 Bind(&loop);
752 {
753 // Load the current {value}.
754 value = var_value.value();
755
756 // Check if the {value} is a Smi or a HeapObject.
757 Label if_valueissmi(this), if_valueisnotsmi(this);
758 Branch(WordIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
759
760 Bind(&if_valueissmi);
761 {
762 // Convert the Smi {value}.
763 var_result.Bind(SmiToFloat64(value));
764 Goto(&done_loop);
765 }
766
767 Bind(&if_valueisnotsmi);
768 {
769 // Check if {value} is a HeapNumber.
770 Label if_valueisheapnumber(this),
771 if_valueisnotheapnumber(this, Label::kDeferred);
772 Branch(WordEqual(LoadMap(value), HeapNumberMapConstant()),
773 &if_valueisheapnumber, &if_valueisnotheapnumber);
774
775 Bind(&if_valueisheapnumber);
776 {
777 // Load the floating point value.
778 var_result.Bind(LoadHeapNumberValue(value));
779 Goto(&done_loop);
780 }
781
782 Bind(&if_valueisnotheapnumber);
783 {
784 // Convert the {value} to a Number first.
785 Callable callable = CodeFactory::NonNumberToNumber(isolate());
786 var_value.Bind(CallStub(callable, context, value));
787 Goto(&loop);
788 }
789 }
790 }
791 Bind(&done_loop);
792 return var_result.value();
793}
794
795Node* CodeStubAssembler::TruncateTaggedToWord32(Node* context, Node* value) {
796 // We might need to loop once due to ToNumber conversion.
797 Variable var_value(this, MachineRepresentation::kTagged),
798 var_result(this, MachineRepresentation::kWord32);
799 Label loop(this, &var_value), done_loop(this, &var_result);
800 var_value.Bind(value);
801 Goto(&loop);
802 Bind(&loop);
803 {
804 // Load the current {value}.
805 value = var_value.value();
806
807 // Check if the {value} is a Smi or a HeapObject.
808 Label if_valueissmi(this), if_valueisnotsmi(this);
809 Branch(WordIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
810
811 Bind(&if_valueissmi);
812 {
813 // Convert the Smi {value}.
814 var_result.Bind(SmiToWord32(value));
815 Goto(&done_loop);
816 }
817
818 Bind(&if_valueisnotsmi);
819 {
820 // Check if {value} is a HeapNumber.
821 Label if_valueisheapnumber(this),
822 if_valueisnotheapnumber(this, Label::kDeferred);
823 Branch(WordEqual(LoadMap(value), HeapNumberMapConstant()),
824 &if_valueisheapnumber, &if_valueisnotheapnumber);
825
826 Bind(&if_valueisheapnumber);
827 {
828 // Truncate the floating point value.
829 var_result.Bind(TruncateHeapNumberValueToWord32(value));
830 Goto(&done_loop);
831 }
832
833 Bind(&if_valueisnotheapnumber);
834 {
835 // Convert the {value} to a Number first.
836 Callable callable = CodeFactory::NonNumberToNumber(isolate());
837 var_value.Bind(CallStub(callable, context, value));
838 Goto(&loop);
839 }
840 }
841 }
842 Bind(&done_loop);
843 return var_result.value();
844}
845
846Node* CodeStubAssembler::TruncateHeapNumberValueToWord32(Node* object) {
847 Node* value = LoadHeapNumberValue(object);
848 return TruncateFloat64ToWord32(value);
849}
850
851Node* CodeStubAssembler::ChangeFloat64ToTagged(Node* value) {
852 Node* value32 = RoundFloat64ToInt32(value);
853 Node* value64 = ChangeInt32ToFloat64(value32);
854
855 Label if_valueisint32(this), if_valueisheapnumber(this), if_join(this);
856
857 Label if_valueisequal(this), if_valueisnotequal(this);
858 Branch(Float64Equal(value, value64), &if_valueisequal, &if_valueisnotequal);
859 Bind(&if_valueisequal);
860 {
861 GotoUnless(Word32Equal(value32, Int32Constant(0)), &if_valueisint32);
862 BranchIfInt32LessThan(Float64ExtractHighWord32(value), Int32Constant(0),
863 &if_valueisheapnumber, &if_valueisint32);
864 }
865 Bind(&if_valueisnotequal);
866 Goto(&if_valueisheapnumber);
867
868 Variable var_result(this, MachineRepresentation::kTagged);
869 Bind(&if_valueisint32);
870 {
871 if (Is64()) {
872 Node* result = SmiTag(ChangeInt32ToInt64(value32));
873 var_result.Bind(result);
874 Goto(&if_join);
875 } else {
876 Node* pair = Int32AddWithOverflow(value32, value32);
877 Node* overflow = Projection(1, pair);
878 Label if_overflow(this, Label::kDeferred), if_notoverflow(this);
879 Branch(overflow, &if_overflow, &if_notoverflow);
880 Bind(&if_overflow);
881 Goto(&if_valueisheapnumber);
882 Bind(&if_notoverflow);
883 {
884 Node* result = Projection(0, pair);
885 var_result.Bind(result);
886 Goto(&if_join);
887 }
888 }
889 }
890 Bind(&if_valueisheapnumber);
891 {
892 Node* result = AllocateHeapNumberWithValue(value);
893 var_result.Bind(result);
894 Goto(&if_join);
895 }
896 Bind(&if_join);
897 return var_result.value();
898}
899
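// ChangeFloat64ToTagged above only produces a Smi when the double survives a
// round trip through int32 unchanged. The extra check for value32 == 0 looks
// at the sign bit in the high word of the double: +0.0 and -0.0 both convert
// to the int32 0, but -0.0 must keep its sign and is therefore boxed in a
// HeapNumber. An illustrative pair of inputs:
//
//   value = 7.0  -> value32 = 7, round trip is equal      -> Smi 7
//   value = -0.0 -> value32 = 0, high word sign bit set   -> HeapNumber(-0.0)
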
900Node* CodeStubAssembler::ChangeInt32ToTagged(Node* value) {
901 if (Is64()) {
902 return SmiTag(ChangeInt32ToInt64(value));
903 }
904 Variable var_result(this, MachineRepresentation::kTagged);
905 Node* pair = Int32AddWithOverflow(value, value);
906 Node* overflow = Projection(1, pair);
907 Label if_overflow(this, Label::kDeferred), if_notoverflow(this),
908 if_join(this);
909 Branch(overflow, &if_overflow, &if_notoverflow);
910 Bind(&if_overflow);
911 {
912 Node* value64 = ChangeInt32ToFloat64(value);
913 Node* result = AllocateHeapNumberWithValue(value64);
914 var_result.Bind(result);
915 }
916 Goto(&if_join);
917 Bind(&if_notoverflow);
918 {
919 Node* result = Projection(0, pair);
920 var_result.Bind(result);
921 }
922 Goto(&if_join);
923 Bind(&if_join);
924 return var_result.value();
925}
926
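// On 32-bit targets ChangeInt32ToTagged relies on Int32AddWithOverflow(value,
// value): adding a value to itself is a left shift by one, which is exactly
// 32-bit Smi tagging, and the overflow projection reports whether the value
// fits into the 31-bit Smi payload. A worked example:
//
//   value = 5          -> 5 + 5 = 10, no overflow  -> tagged Smi
//   value = 0x40000000 -> 2^30 + 2^30 overflows    -> boxed in a HeapNumber
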
927Node* CodeStubAssembler::ChangeUint32ToTagged(Node* value) {
928 Label if_overflow(this, Label::kDeferred), if_not_overflow(this),
929 if_join(this);
930 Variable var_result(this, MachineRepresentation::kTagged);
931 // If {value} > 2^31 - 1, we need to store it in a HeapNumber.
932 Branch(Int32LessThan(value, Int32Constant(0)), &if_overflow,
933 &if_not_overflow);
934 Bind(&if_not_overflow);
935 {
936 if (Is64()) {
937 var_result.Bind(SmiTag(ChangeUint32ToUint64(value)));
938 } else {
939 // If tagging {value} results in an overflow, we need to use a HeapNumber
940 // to represent it.
941 Node* pair = Int32AddWithOverflow(value, value);
942 Node* overflow = Projection(1, pair);
943 GotoIf(overflow, &if_overflow);
944
945 Node* result = Projection(0, pair);
946 var_result.Bind(result);
947 }
948 }
949 Goto(&if_join);
950
951 Bind(&if_overflow);
952 {
953 Node* float64_value = ChangeUint32ToFloat64(value);
954 var_result.Bind(AllocateHeapNumberWithValue(float64_value));
955 }
956 Goto(&if_join);
957
958 Bind(&if_join);
959 return var_result.value();
960}
961
962Node* CodeStubAssembler::ToThisString(Node* context, Node* value,
963 char const* method_name) {
964 Variable var_value(this, MachineRepresentation::kTagged);
965 var_value.Bind(value);
966
967 // Check if the {value} is a Smi or a HeapObject.
968 Label if_valueissmi(this, Label::kDeferred), if_valueisnotsmi(this),
969 if_valueisstring(this);
970 Branch(WordIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
971 Bind(&if_valueisnotsmi);
972 {
973 // Load the instance type of the {value}.
974 Node* value_instance_type = LoadInstanceType(value);
975
976 // Check if the {value} is already String.
977 Label if_valueisnotstring(this, Label::kDeferred);
978 Branch(
979 Int32LessThan(value_instance_type, Int32Constant(FIRST_NONSTRING_TYPE)),
980 &if_valueisstring, &if_valueisnotstring);
981 Bind(&if_valueisnotstring);
982 {
983 // Check if the {value} is null.
984 Label if_valueisnullorundefined(this, Label::kDeferred),
985 if_valueisnotnullorundefined(this, Label::kDeferred),
986 if_valueisnotnull(this, Label::kDeferred);
987 Branch(WordEqual(value, NullConstant()), &if_valueisnullorundefined,
988 &if_valueisnotnull);
989 Bind(&if_valueisnotnull);
990 {
991 // Check if the {value} is undefined.
992 Branch(WordEqual(value, UndefinedConstant()),
993 &if_valueisnullorundefined, &if_valueisnotnullorundefined);
994 Bind(&if_valueisnotnullorundefined);
995 {
996 // Convert the {value} to a String.
997 Callable callable = CodeFactory::ToString(isolate());
998 var_value.Bind(CallStub(callable, context, value));
999 Goto(&if_valueisstring);
1000 }
1001 }
1002
1003 Bind(&if_valueisnullorundefined);
1004 {
1005 // The {value} is either null or undefined.
1006 CallRuntime(Runtime::kThrowCalledOnNullOrUndefined, context,
1007 HeapConstant(factory()->NewStringFromAsciiChecked(
1008 method_name, TENURED)));
1009 Goto(&if_valueisstring); // Never reached.
1010 }
1011 }
1012 }
1013 Bind(&if_valueissmi);
1014 {
1015 // The {value} is a Smi, convert it to a String.
1016 Callable callable = CodeFactory::NumberToString(isolate());
1017 var_value.Bind(CallStub(callable, context, value));
1018 Goto(&if_valueisstring);
1019 }
1020 Bind(&if_valueisstring);
1021 return var_value.value();
1022}
1023
1024Node* CodeStubAssembler::StringCharCodeAt(Node* string, Node* index) {
1025 // Translate the {index} into a Word.
1026 index = SmiToWord(index);
1027
1028 // We may need to loop in case of cons or sliced strings.
1029 Variable var_index(this, MachineType::PointerRepresentation());
1030 Variable var_result(this, MachineRepresentation::kWord32);
1031 Variable var_string(this, MachineRepresentation::kTagged);
1032 Variable* loop_vars[] = {&var_index, &var_string};
1033 Label done_loop(this, &var_result), loop(this, 2, loop_vars);
1034 var_string.Bind(string);
1035 var_index.Bind(index);
1036 Goto(&loop);
1037 Bind(&loop);
1038 {
1039 // Load the current {index}.
1040 index = var_index.value();
1041
1042 // Load the current {string}.
1043 string = var_string.value();
1044
1045 // Load the instance type of the {string}.
1046 Node* string_instance_type = LoadInstanceType(string);
1047
1048 // Check if the {string} is a SeqString.
1049 Label if_stringissequential(this), if_stringisnotsequential(this);
1050 Branch(Word32Equal(Word32And(string_instance_type,
1051 Int32Constant(kStringRepresentationMask)),
1052 Int32Constant(kSeqStringTag)),
1053 &if_stringissequential, &if_stringisnotsequential);
1054
1055 Bind(&if_stringissequential);
1056 {
1057 // Check if the {string} is a TwoByteSeqString or a OneByteSeqString.
1058 Label if_stringistwobyte(this), if_stringisonebyte(this);
1059 Branch(Word32Equal(Word32And(string_instance_type,
1060 Int32Constant(kStringEncodingMask)),
1061 Int32Constant(kTwoByteStringTag)),
1062 &if_stringistwobyte, &if_stringisonebyte);
1063
1064 Bind(&if_stringisonebyte);
1065 {
1066 var_result.Bind(
1067 Load(MachineType::Uint8(), string,
1068 IntPtrAdd(index, IntPtrConstant(SeqOneByteString::kHeaderSize -
1069 kHeapObjectTag))));
1070 Goto(&done_loop);
1071 }
1072
1073 Bind(&if_stringistwobyte);
1074 {
1075 var_result.Bind(
1076 Load(MachineType::Uint16(), string,
1077 IntPtrAdd(WordShl(index, IntPtrConstant(1)),
1078 IntPtrConstant(SeqTwoByteString::kHeaderSize -
1079 kHeapObjectTag))));
1080 Goto(&done_loop);
1081 }
1082 }
1083
1084 Bind(&if_stringisnotsequential);
1085 {
1086 // Check if the {string} is a ConsString.
1087 Label if_stringiscons(this), if_stringisnotcons(this);
1088 Branch(Word32Equal(Word32And(string_instance_type,
1089 Int32Constant(kStringRepresentationMask)),
1090 Int32Constant(kConsStringTag)),
1091 &if_stringiscons, &if_stringisnotcons);
1092
1093 Bind(&if_stringiscons);
1094 {
1095 // Check whether the right hand side is the empty string (i.e. if
1096 // this is really a flat string in a cons string). If that is not
1097 // the case we flatten the string first.
1098 Label if_rhsisempty(this), if_rhsisnotempty(this, Label::kDeferred);
1099 Node* rhs = LoadObjectField(string, ConsString::kSecondOffset);
1100 Branch(WordEqual(rhs, EmptyStringConstant()), &if_rhsisempty,
1101 &if_rhsisnotempty);
1102
1103 Bind(&if_rhsisempty);
1104 {
1105 // Just operate on the left hand side of the {string}.
1106 var_string.Bind(LoadObjectField(string, ConsString::kFirstOffset));
1107 Goto(&loop);
1108 }
1109
1110 Bind(&if_rhsisnotempty);
1111 {
1112 // Flatten the {string} and look up the character in the resulting string.
1113 var_string.Bind(CallRuntime(Runtime::kFlattenString,
1114 NoContextConstant(), string));
1115 Goto(&loop);
1116 }
1117 }
1118
1119 Bind(&if_stringisnotcons);
1120 {
1121 // Check if the {string} is an ExternalString.
1122 Label if_stringisexternal(this), if_stringisnotexternal(this);
1123 Branch(Word32Equal(Word32And(string_instance_type,
1124 Int32Constant(kStringRepresentationMask)),
1125 Int32Constant(kExternalStringTag)),
1126 &if_stringisexternal, &if_stringisnotexternal);
1127
1128 Bind(&if_stringisexternal);
1129 {
1130 // Check if the {string} is a short external string.
1131 Label if_stringisshort(this),
1132 if_stringisnotshort(this, Label::kDeferred);
1133 Branch(Word32Equal(Word32And(string_instance_type,
1134 Int32Constant(kShortExternalStringMask)),
1135 Int32Constant(0)),
1136 &if_stringisshort, &if_stringisnotshort);
1137
1138 Bind(&if_stringisshort);
1139 {
1140 // Load the actual resource data from the {string}.
1141 Node* string_resource_data =
1142 LoadObjectField(string, ExternalString::kResourceDataOffset,
1143 MachineType::Pointer());
1144
1145 // Check if the {string} is a TwoByteExternalString or a
1146 // OneByteExternalString.
1147 Label if_stringistwobyte(this), if_stringisonebyte(this);
1148 Branch(Word32Equal(Word32And(string_instance_type,
1149 Int32Constant(kStringEncodingMask)),
1150 Int32Constant(kTwoByteStringTag)),
1151 &if_stringistwobyte, &if_stringisonebyte);
1152
1153 Bind(&if_stringisonebyte);
1154 {
1155 var_result.Bind(
1156 Load(MachineType::Uint8(), string_resource_data, index));
1157 Goto(&done_loop);
1158 }
1159
1160 Bind(&if_stringistwobyte);
1161 {
1162 var_result.Bind(Load(MachineType::Uint16(), string_resource_data,
1163 WordShl(index, IntPtrConstant(1))));
1164 Goto(&done_loop);
1165 }
1166 }
1167
1168 Bind(&if_stringisnotshort);
1169 {
1170 // The {string} might be compressed, call the runtime.
1171 var_result.Bind(SmiToWord32(
1172 CallRuntime(Runtime::kExternalStringGetChar,
1173 NoContextConstant(), string, SmiTag(index))));
1174 Goto(&done_loop);
1175 }
1176 }
1177
1178 Bind(&if_stringisnotexternal);
1179 {
1180 // The {string} is a SlicedString, continue with its parent.
1181 Node* string_offset =
1182 SmiToWord(LoadObjectField(string, SlicedString::kOffsetOffset));
1183 Node* string_parent =
1184 LoadObjectField(string, SlicedString::kParentOffset);
1185 var_index.Bind(IntPtrAdd(index, string_offset));
1186 var_string.Bind(string_parent);
1187 Goto(&loop);
1188 }
1189 }
1190 }
1191 }
1192
1193 Bind(&done_loop);
1194 return var_result.value();
1195}
1196
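// StringCharCodeAt above dispatches on the string representation and only
// loops when it has to look through an indirection. An illustrative walk for
// a SlicedString with offset 2:
//
//   iteration 1: {string} is a SlicedString -> add 2 to {index} and continue
//                with the parent string
//   iteration 2: the parent is a sequential one-byte string -> load the byte
//                at SeqOneByteString::kHeaderSize - kHeapObjectTag + index
//                and finish
//
// Cons strings with a non-empty right-hand side and compressed external
// strings are the slow cases that go through the runtime.
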
1197Node* CodeStubAssembler::StringFromCharCode(Node* code) {
1198 Variable var_result(this, MachineRepresentation::kTagged);
1199
1200 // Check if the {code} is a one-byte char code.
1201 Label if_codeisonebyte(this), if_codeistwobyte(this, Label::kDeferred),
1202 if_done(this);
1203 Branch(Int32LessThanOrEqual(code, Int32Constant(String::kMaxOneByteCharCode)),
1204 &if_codeisonebyte, &if_codeistwobyte);
1205 Bind(&if_codeisonebyte);
1206 {
1207 // Load the isolate wide single character string cache.
1208 Node* cache = LoadRoot(Heap::kSingleCharacterStringCacheRootIndex);
1209
1210 // Check if we have an entry for the {code} in the single character string
1211 // cache already.
1212 Label if_entryisundefined(this, Label::kDeferred),
1213 if_entryisnotundefined(this);
1214 Node* entry = LoadFixedArrayElement(cache, code);
1215 Branch(WordEqual(entry, UndefinedConstant()), &if_entryisundefined,
1216 &if_entryisnotundefined);
1217
1218 Bind(&if_entryisundefined);
1219 {
1220 // Allocate a new SeqOneByteString for {code} and store it in the {cache}.
1221 Node* result = AllocateSeqOneByteString(1);
1222 StoreNoWriteBarrier(
1223 MachineRepresentation::kWord8, result,
1224 IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag), code);
1225 StoreFixedArrayElement(cache, code, result);
1226 var_result.Bind(result);
1227 Goto(&if_done);
1228 }
1229
1230 Bind(&if_entryisnotundefined);
1231 {
1232 // Return the entry from the {cache}.
1233 var_result.Bind(entry);
1234 Goto(&if_done);
1235 }
1236 }
1237
1238 Bind(&if_codeistwobyte);
1239 {
1240 // Allocate a new SeqTwoByteString for {code}.
1241 Node* result = AllocateSeqTwoByteString(1);
1242 StoreNoWriteBarrier(
1243 MachineRepresentation::kWord16, result,
1244 IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), code);
1245 var_result.Bind(result);
1246 Goto(&if_done);
1247 }
1248
1249 Bind(&if_done);
1250 return var_result.value();
1251}
1252
1253Node* CodeStubAssembler::BitFieldDecode(Node* word32, uint32_t shift,
1254 uint32_t mask) {
1255 return Word32Shr(Word32And(word32, Int32Constant(mask)),
1256 Int32Constant(shift));
1257}
1258
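// BitFieldDecode extracts a bit field as (word32 & mask) >> shift. A worked
// example with a hypothetical 3-bit field stored at bit 4:
//
//   word32 = 0b10110100, mask = 0b01110000, shift = 4
//   word32 & mask        = 0b00110000
//   (word32 & mask) >> 4 = 0b011 = 3
//
// The templated BitFieldDecode<...> calls used below (e.g. with
// Map::ElementsKindBits) take their shift and mask from the corresponding
// BitField definition.
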
1259void CodeStubAssembler::TryToName(Node* key, Label* if_keyisindex,
1260 Variable* var_index, Label* if_keyisunique,
1261 Label* call_runtime) {
1262 DCHECK_EQ(MachineRepresentation::kWord32, var_index->rep());
1263
1264 Label if_keyissmi(this), if_keyisnotsmi(this);
1265 Branch(WordIsSmi(key), &if_keyissmi, &if_keyisnotsmi);
1266 Bind(&if_keyissmi);
1267 {
1268 // Negative smi keys are named properties. Handle in the runtime.
1269 Label if_keyispositive(this);
1270 Branch(WordIsPositiveSmi(key), &if_keyispositive, call_runtime);
1271 Bind(&if_keyispositive);
1272
1273 var_index->Bind(SmiToWord32(key));
1274 Goto(if_keyisindex);
1275 }
1276
1277 Bind(&if_keyisnotsmi);
1278
1279 Node* key_instance_type = LoadInstanceType(key);
1280 Label if_keyisnotsymbol(this);
1281 Branch(Word32Equal(key_instance_type, Int32Constant(SYMBOL_TYPE)),
1282 if_keyisunique, &if_keyisnotsymbol);
1283 Bind(&if_keyisnotsymbol);
1284 {
1285 Label if_keyisinternalized(this);
1286 Node* bits =
1287 WordAnd(key_instance_type,
1288 Int32Constant(kIsNotStringMask | kIsNotInternalizedMask));
1289 Branch(Word32Equal(bits, Int32Constant(kStringTag | kInternalizedTag)),
1290 &if_keyisinternalized, call_runtime);
1291 Bind(&if_keyisinternalized);
1292
1293 // Check whether the key is an array index passed in as a string. Handle it
1294 // uniformly with smi keys if so.
1295 // TODO(verwaest): Also support non-internalized strings.
1296 Node* hash = LoadNameHash(key);
1297 Node* bit =
1298 Word32And(hash, Int32Constant(internal::Name::kIsNotArrayIndexMask));
1299 Label if_isarrayindex(this);
1300 Branch(Word32Equal(bit, Int32Constant(0)), &if_isarrayindex,
1301 if_keyisunique);
1302 Bind(&if_isarrayindex);
1303 var_index->Bind(BitFieldDecode<internal::Name::ArrayIndexValueBits>(hash));
1304 Goto(if_keyisindex);
1305 }
1306}
1307
1308void CodeStubAssembler::TryLookupProperty(Node* object, Node* map,
1309 Node* instance_type, Node* name,
1310 Label* if_found, Label* if_not_found,
1311 Label* call_runtime) {
1312 {
1313 Label if_objectissimple(this);
1314 Branch(Int32LessThanOrEqual(instance_type,
1315 Int32Constant(LAST_SPECIAL_RECEIVER_TYPE)),
1316 call_runtime, &if_objectissimple);
1317 Bind(&if_objectissimple);
1318 }
1319
1320 // TODO(verwaest): Perform a dictionary lookup on slow-mode receivers.
1321 Node* bit_field3 = LoadMapBitField3(map);
1322 Node* bit = BitFieldDecode<Map::DictionaryMap>(bit_field3);
1323 Label if_isfastmap(this);
1324 Branch(Word32Equal(bit, Int32Constant(0)), &if_isfastmap, call_runtime);
1325 Bind(&if_isfastmap);
1326 Node* nof = BitFieldDecode<Map::NumberOfOwnDescriptorsBits>(bit_field3);
1327 // Bail out to the runtime for large numbers of own descriptors. The stub only
1328 // does linear search, which becomes too expensive in that case.
1329 {
1330 static const int32_t kMaxLinear = 210;
1331 Label above_max(this), below_max(this);
1332 Branch(Int32LessThanOrEqual(nof, Int32Constant(kMaxLinear)), &below_max,
1333 call_runtime);
1334 Bind(&below_max);
1335 }
1336 Node* descriptors = LoadMapDescriptors(map);
1337
1338 Variable var_descriptor(this, MachineRepresentation::kWord32);
1339 Label loop(this, &var_descriptor);
1340 var_descriptor.Bind(Int32Constant(0));
1341 Goto(&loop);
1342 Bind(&loop);
1343 {
1344 Node* index = var_descriptor.value();
1345 Node* offset = Int32Constant(DescriptorArray::ToKeyIndex(0));
1346 Node* factor = Int32Constant(DescriptorArray::kDescriptorSize);
1347 Label if_notdone(this);
1348 Branch(Word32Equal(index, nof), if_not_found, &if_notdone);
1349 Bind(&if_notdone);
1350 {
1351 Node* array_index = Int32Add(offset, Int32Mul(index, factor));
1352 Node* current = LoadFixedArrayElement(descriptors, array_index);
1353 Label if_unequal(this);
1354 Branch(WordEqual(current, name), if_found, &if_unequal);
1355 Bind(&if_unequal);
1356
1357 var_descriptor.Bind(Int32Add(index, Int32Constant(1)));
1358 Goto(&loop);
1359 }
1360 }
1361}
1362
1363void CodeStubAssembler::TryLookupElement(Node* object, Node* map,
1364 Node* instance_type, Node* index,
1365 Label* if_found, Label* if_not_found,
1366 Label* call_runtime) {
1367 {
1368 Label if_objectissimple(this);
1369 Branch(Int32LessThanOrEqual(instance_type,
1370 Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER)),
1371 call_runtime, &if_objectissimple);
1372 Bind(&if_objectissimple);
1373 }
1374
1375 Node* bit_field2 = LoadMapBitField2(map);
1376 Node* elements_kind = BitFieldDecode<Map::ElementsKindBits>(bit_field2);
1377
1378 // TODO(verwaest): Support other elements kinds as well.
1379 Label if_isobjectorsmi(this);
1380 Branch(
1381 Int32LessThanOrEqual(elements_kind, Int32Constant(FAST_HOLEY_ELEMENTS)),
1382 &if_isobjectorsmi, call_runtime);
1383 Bind(&if_isobjectorsmi);
1384 {
1385 Node* elements = LoadElements(object);
1386 Node* length = LoadFixedArrayBaseLength(elements);
1387
1388 Label if_iskeyinrange(this);
1389 Branch(Int32LessThan(index, SmiToWord32(length)), &if_iskeyinrange,
1390 if_not_found);
1391
1392 Bind(&if_iskeyinrange);
1393 Node* element = LoadFixedArrayElement(elements, index);
1394 Node* the_hole = LoadRoot(Heap::kTheHoleValueRootIndex);
1395 Branch(WordEqual(element, the_hole), if_not_found, if_found);
1396 }
1397}
1398
1399Node* CodeStubAssembler::OrdinaryHasInstance(Node* context, Node* callable,
1400 Node* object) {
1401 Variable var_result(this, MachineRepresentation::kTagged);
1402 Label return_false(this), return_true(this),
1403 return_runtime(this, Label::kDeferred), return_result(this);
1404
1405 // Goto runtime if {object} is a Smi.
1406 GotoIf(WordIsSmi(object), &return_runtime);
1407
1408 // Load map of {object}.
1409 Node* object_map = LoadMap(object);
1410
1411 // Look up the {callable} and {object} map in the global instanceof cache.
1412 // Note: This is safe because we clear the global instanceof cache whenever
1413 // we change the prototype of any object.
1414 Node* instanceof_cache_function =
1415 LoadRoot(Heap::kInstanceofCacheFunctionRootIndex);
1416 Node* instanceof_cache_map = LoadRoot(Heap::kInstanceofCacheMapRootIndex);
1417 {
1418 Label instanceof_cache_miss(this);
1419 GotoUnless(WordEqual(instanceof_cache_function, callable),
1420 &instanceof_cache_miss);
1421 GotoUnless(WordEqual(instanceof_cache_map, object_map),
1422 &instanceof_cache_miss);
1423 var_result.Bind(LoadRoot(Heap::kInstanceofCacheAnswerRootIndex));
1424 Goto(&return_result);
1425 Bind(&instanceof_cache_miss);
1426 }
1427
1428 // Goto runtime if {callable} is a Smi.
1429 GotoIf(WordIsSmi(callable), &return_runtime);
1430
1431 // Load map of {callable}.
1432 Node* callable_map = LoadMap(callable);
1433
1434 // Goto runtime if {callable} is not a JSFunction.
1435 Node* callable_instance_type = LoadMapInstanceType(callable_map);
1436 GotoUnless(
1437 Word32Equal(callable_instance_type, Int32Constant(JS_FUNCTION_TYPE)),
1438 &return_runtime);
1439
1440 // Goto runtime if {callable} is not a constructor or has
1441 // a non-instance "prototype".
1442 Node* callable_bitfield = LoadMapBitField(callable_map);
1443 GotoUnless(
1444 Word32Equal(Word32And(callable_bitfield,
1445 Int32Constant((1 << Map::kHasNonInstancePrototype) |
1446 (1 << Map::kIsConstructor))),
1447 Int32Constant(1 << Map::kIsConstructor)),
1448 &return_runtime);
1449
1450 // Get the "prototype" (or initial map) of the {callable}.
1451 Node* callable_prototype =
1452 LoadObjectField(callable, JSFunction::kPrototypeOrInitialMapOffset);
1453 {
1454 Variable var_callable_prototype(this, MachineRepresentation::kTagged);
1455 Label callable_prototype_valid(this);
1456 var_callable_prototype.Bind(callable_prototype);
1457
1458 // Resolve the "prototype" if the {callable} has an initial map. Afterwards
1459 // the {callable_prototype} will be either the JSReceiver prototype object
1460 // or the hole value, which means that no instances of the {callable} were
1461 // created so far and hence we should return false.
1462 Node* callable_prototype_instance_type =
1463 LoadInstanceType(callable_prototype);
1464 GotoUnless(
1465 Word32Equal(callable_prototype_instance_type, Int32Constant(MAP_TYPE)),
1466 &callable_prototype_valid);
1467 var_callable_prototype.Bind(
1468 LoadObjectField(callable_prototype, Map::kPrototypeOffset));
1469 Goto(&callable_prototype_valid);
1470 Bind(&callable_prototype_valid);
1471 callable_prototype = var_callable_prototype.value();
1472 }
1473
1474 // Update the global instanceof cache with the current {object} map and
1475 // {callable}. The cached answer will be set when it is known below.
1476 StoreRoot(Heap::kInstanceofCacheFunctionRootIndex, callable);
1477 StoreRoot(Heap::kInstanceofCacheMapRootIndex, object_map);
1478
1479 // Loop through the prototype chain looking for the {callable} prototype.
1480 Variable var_object_map(this, MachineRepresentation::kTagged);
1481 var_object_map.Bind(object_map);
1482 Label loop(this, &var_object_map);
1483 Goto(&loop);
1484 Bind(&loop);
1485 {
1486 Node* object_map = var_object_map.value();
1487
1488 // Check if the current {object} needs to be access checked.
1489 Node* object_bitfield = LoadMapBitField(object_map);
1490 GotoUnless(
1491 Word32Equal(Word32And(object_bitfield,
1492 Int32Constant(1 << Map::kIsAccessCheckNeeded)),
1493 Int32Constant(0)),
1494 &return_runtime);
1495
1496 // Check if the current {object} is a proxy.
1497 Node* object_instance_type = LoadMapInstanceType(object_map);
1498 GotoIf(Word32Equal(object_instance_type, Int32Constant(JS_PROXY_TYPE)),
1499 &return_runtime);
1500
1501 // Check the current {object} prototype.
1502 Node* object_prototype = LoadMapPrototype(object_map);
1503 GotoIf(WordEqual(object_prototype, callable_prototype), &return_true);
1504 GotoIf(WordEqual(object_prototype, NullConstant()), &return_false);
1505
1506 // Continue with the prototype.
1507 var_object_map.Bind(LoadMap(object_prototype));
1508 Goto(&loop);
1509 }
1510
1511 Bind(&return_true);
1512 StoreRoot(Heap::kInstanceofCacheAnswerRootIndex, BooleanConstant(true));
1513 var_result.Bind(BooleanConstant(true));
1514 Goto(&return_result);
1515
1516 Bind(&return_false);
1517 StoreRoot(Heap::kInstanceofCacheAnswerRootIndex, BooleanConstant(false));
1518 var_result.Bind(BooleanConstant(false));
1519 Goto(&return_result);
1520
1521 Bind(&return_runtime);
1522 {
1523 // Invalidate the global instanceof cache.
1524 StoreRoot(Heap::kInstanceofCacheFunctionRootIndex, SmiConstant(0));
1525 // Fallback to the runtime implementation.
1526 var_result.Bind(
1527 CallRuntime(Runtime::kOrdinaryHasInstance, context, callable, object));
1528 }
1529 Goto(&return_result);
1530
1531 Bind(&return_result);
1532 return var_result.value();
1533}
1534
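// Ignoring the instanceof cache and the runtime fallbacks, the loop in
// OrdinaryHasInstance above implements the standard semantics: walk the
// prototype chain of {object} and compare each prototype against the
// "prototype" of {callable}. Sketch of one iteration:
//
//   object_prototype == callable_prototype -> return true
//   object_prototype == null               -> return false
//   otherwise                              -> continue with LoadMap(object_prototype)
//
// Access-checked objects, proxies, non-constructor callables and Smi inputs
// all bail out to Runtime::kOrdinaryHasInstance instead.
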
1535compiler::Node* CodeStubAssembler::ElementOffsetFromIndex(Node* index_node,
1536 ElementsKind kind,
1537 ParameterMode mode,
1538 int base_size) {
1539 bool is_double = IsFastDoubleElementsKind(kind);
1540 int element_size_shift = is_double ? kDoubleSizeLog2 : kPointerSizeLog2;
1541 int element_size = 1 << element_size_shift;
1542 int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
1543 int32_t index = 0;
1544 bool constant_index = false;
1545 if (mode == SMI_PARAMETERS) {
1546 element_size_shift -= kSmiShiftBits;
1547 intptr_t temp = 0;
1548 constant_index = ToIntPtrConstant(index_node, temp);
1549 index = temp >> kSmiShiftBits;
1550 } else {
1551 constant_index = ToInt32Constant(index_node, index);
1552 }
1553 if (constant_index) {
1554 return IntPtrConstant(base_size + element_size * index);
1555 }
1556 if (Is64() && mode == INTEGER_PARAMETERS) {
1557 index_node = ChangeInt32ToInt64(index_node);
1558 }
1559 if (base_size == 0) {
1560 return (element_size_shift >= 0)
1561 ? WordShl(index_node, IntPtrConstant(element_size_shift))
1562 : WordShr(index_node, IntPtrConstant(-element_size_shift));
1563 }
1564 return IntPtrAdd(
1565 IntPtrConstant(base_size),
1566 (element_size_shift >= 0)
1567 ? WordShl(index_node, IntPtrConstant(element_size_shift))
1568 : WordShr(index_node, IntPtrConstant(-element_size_shift)));
1569}
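
// ElementOffsetFromIndex computes base_size + index * element_size, using a
// shift because element sizes are powers of two. With SMI_PARAMETERS the
// incoming index is a tagged Smi, so the Smi shift is folded into the element
// size shift instead of untagging first. A worked example for untagged
// (INTEGER_PARAMETERS) FixedArray indices on a 64-bit target:
//
//   kind = FAST_HOLEY_ELEMENTS -> element_size_shift = kPointerSizeLog2 = 3
//   base_size = FixedArray::kHeaderSize - kHeapObjectTag
//   index = 4 -> offset = base_size + (4 << 3) = base_size + 32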
1570
1571} // namespace internal
1572} // namespace v8