// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/js-native-context-specialization.h"

#include "src/accessors.h"
#include "src/code-factory.h"
#include "src/compilation-dependencies.h"
#include "src/compiler/access-builder.h"
#include "src/compiler/access-info.h"
#include "src/compiler/js-graph.h"
#include "src/compiler/js-operator.h"
#include "src/compiler/linkage.h"
#include "src/compiler/node-matchers.h"
#include "src/field-index-inl.h"
#include "src/isolate-inl.h"
#include "src/objects-inl.h"  // TODO(mstarzinger): Temporary cycle breaker!
#include "src/type-cache.h"
#include "src/type-feedback-vector.h"

namespace v8 {
namespace internal {
namespace compiler {
25
26JSNativeContextSpecialization::JSNativeContextSpecialization(
27 Editor* editor, JSGraph* jsgraph, Flags flags,
28 MaybeHandle<Context> native_context, CompilationDependencies* dependencies,
29 Zone* zone)
30 : AdvancedReducer(editor),
31 jsgraph_(jsgraph),
32 flags_(flags),
33 native_context_(native_context),
34 dependencies_(dependencies),
35 zone_(zone),
36 type_cache_(TypeCache::Get()) {}
37
38
39Reduction JSNativeContextSpecialization::Reduce(Node* node) {
40 switch (node->opcode()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +010041 case IrOpcode::kJSLoadContext:
42 return ReduceJSLoadContext(node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000043 case IrOpcode::kJSLoadNamed:
44 return ReduceJSLoadNamed(node);
45 case IrOpcode::kJSStoreNamed:
46 return ReduceJSStoreNamed(node);
47 case IrOpcode::kJSLoadProperty:
48 return ReduceJSLoadProperty(node);
49 case IrOpcode::kJSStoreProperty:
50 return ReduceJSStoreProperty(node);
51 default:
52 break;
53 }
54 return NoChange();
55}
56
Ben Murdoch097c5b22016-05-18 11:27:45 +010057Reduction JSNativeContextSpecialization::ReduceJSLoadContext(Node* node) {
58 DCHECK_EQ(IrOpcode::kJSLoadContext, node->opcode());
59 ContextAccess const& access = ContextAccessOf(node->op());
60 Handle<Context> native_context;
61 // Specialize JSLoadContext(NATIVE_CONTEXT_INDEX) to the known native
62 // context (if any), so we can constant-fold those fields, which is
63 // safe, since the NATIVE_CONTEXT_INDEX slot is always immutable.
64 if (access.index() == Context::NATIVE_CONTEXT_INDEX &&
65 GetNativeContext(node).ToHandle(&native_context)) {
66 Node* value = jsgraph()->HeapConstant(native_context);
67 ReplaceWithValue(node, value);
68 return Replace(value);
69 }
70 return NoChange();
71}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000072
73Reduction JSNativeContextSpecialization::ReduceNamedAccess(
74 Node* node, Node* value, MapHandleList const& receiver_maps,
75 Handle<Name> name, AccessMode access_mode, LanguageMode language_mode,
76 Node* index) {
77 DCHECK(node->opcode() == IrOpcode::kJSLoadNamed ||
78 node->opcode() == IrOpcode::kJSStoreNamed ||
79 node->opcode() == IrOpcode::kJSLoadProperty ||
80 node->opcode() == IrOpcode::kJSStoreProperty);
81 Node* receiver = NodeProperties::GetValueInput(node, 0);
82 Node* frame_state = NodeProperties::GetFrameStateInput(node, 1);
83 Node* effect = NodeProperties::GetEffectInput(node);
84 Node* control = NodeProperties::GetControlInput(node);
85
86 // Not much we can do if deoptimization support is disabled.
87 if (!(flags() & kDeoptimizationEnabled)) return NoChange();
88
89 // Retrieve the native context from the given {node}.
90 Handle<Context> native_context;
91 if (!GetNativeContext(node).ToHandle(&native_context)) return NoChange();
92
93 // Compute property access infos for the receiver maps.
94 AccessInfoFactory access_info_factory(dependencies(), native_context,
95 graph()->zone());
96 ZoneVector<PropertyAccessInfo> access_infos(zone());
97 if (!access_info_factory.ComputePropertyAccessInfos(
98 receiver_maps, name, access_mode, &access_infos)) {
99 return NoChange();
100 }
101
102 // Nothing to do if we have no non-deprecated maps.
103 if (access_infos.empty()) return NoChange();
104
105 // The final states for every polymorphic branch. We join them with
106 // Merge++Phi+EffectPhi at the bottom.
107 ZoneVector<Node*> values(zone());
108 ZoneVector<Node*> effects(zone());
109 ZoneVector<Node*> controls(zone());
110
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000111 // Ensure that {index} matches the specified {name} (if {index} is given).
112 if (index != nullptr) {
113 Node* check = graph()->NewNode(simplified()->ReferenceEqual(Type::Name()),
114 index, jsgraph()->HeapConstant(name));
Ben Murdochda12d292016-06-02 14:46:10 +0100115 control = graph()->NewNode(common()->DeoptimizeUnless(), check, frame_state,
116 effect, control);
117 }
118
119 // Check if {receiver} may be a number.
120 bool receiverissmi_possible = false;
121 for (PropertyAccessInfo const& access_info : access_infos) {
122 if (access_info.receiver_type()->Is(Type::Number())) {
123 receiverissmi_possible = true;
124 break;
125 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000126 }
127
128 // Ensure that {receiver} is a heap object.
129 Node* check = graph()->NewNode(simplified()->ObjectIsSmi(), receiver);
Ben Murdochda12d292016-06-02 14:46:10 +0100130 Node* receiverissmi_control = nullptr;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000131 Node* receiverissmi_effect = effect;
Ben Murdochda12d292016-06-02 14:46:10 +0100132 if (receiverissmi_possible) {
133 Node* branch = graph()->NewNode(common()->Branch(), check, control);
134 control = graph()->NewNode(common()->IfFalse(), branch);
135 receiverissmi_control = graph()->NewNode(common()->IfTrue(), branch);
136 receiverissmi_effect = effect;
137 } else {
138 control = graph()->NewNode(common()->DeoptimizeIf(), check, frame_state,
139 effect, control);
140 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000141
142 // Load the {receiver} map. The resulting effect is the dominating effect for
143 // all (polymorphic) branches.
144 Node* receiver_map = effect =
145 graph()->NewNode(simplified()->LoadField(AccessBuilder::ForMap()),
146 receiver, effect, control);
147
148 // Generate code for the various different property access patterns.
149 Node* fallthrough_control = control;
Ben Murdochda12d292016-06-02 14:46:10 +0100150 for (size_t j = 0; j < access_infos.size(); ++j) {
151 PropertyAccessInfo const& access_info = access_infos[j];
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000152 Node* this_value = value;
153 Node* this_receiver = receiver;
154 Node* this_effect = effect;
155 Node* this_control;
156
157 // Perform map check on {receiver}.
158 Type* receiver_type = access_info.receiver_type();
159 if (receiver_type->Is(Type::String())) {
Ben Murdochc5610432016-08-08 18:44:38 +0100160 Node* check = graph()->NewNode(simplified()->ObjectIsString(), receiver);
Ben Murdochda12d292016-06-02 14:46:10 +0100161 if (j == access_infos.size() - 1) {
162 this_control =
163 graph()->NewNode(common()->DeoptimizeUnless(), check, frame_state,
164 this_effect, fallthrough_control);
165 fallthrough_control = nullptr;
166 } else {
167 Node* branch =
168 graph()->NewNode(common()->Branch(), check, fallthrough_control);
169 fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
170 this_control = graph()->NewNode(common()->IfTrue(), branch);
171 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000172 } else {
173 // Emit a (sequence of) map checks for other {receiver}s.
174 ZoneVector<Node*> this_controls(zone());
175 ZoneVector<Node*> this_effects(zone());
Ben Murdochda12d292016-06-02 14:46:10 +0100176 int num_classes = access_info.receiver_type()->NumClasses();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000177 for (auto i = access_info.receiver_type()->Classes(); !i.Done();
178 i.Advance()) {
Ben Murdochda12d292016-06-02 14:46:10 +0100179 DCHECK_LT(0, num_classes);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000180 Handle<Map> map = i.Current();
181 Node* check =
182 graph()->NewNode(simplified()->ReferenceEqual(Type::Internal()),
183 receiver_map, jsgraph()->Constant(map));
Ben Murdochda12d292016-06-02 14:46:10 +0100184 if (--num_classes == 0 && j == access_infos.size() - 1) {
185 this_controls.push_back(
186 graph()->NewNode(common()->DeoptimizeUnless(), check, frame_state,
187 this_effect, fallthrough_control));
188 this_effects.push_back(this_effect);
189 fallthrough_control = nullptr;
190 } else {
191 Node* branch =
192 graph()->NewNode(common()->Branch(), check, fallthrough_control);
193 fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
194 this_controls.push_back(graph()->NewNode(common()->IfTrue(), branch));
195 this_effects.push_back(this_effect);
196 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000197 }
198
199 // The Number case requires special treatment to also deal with Smis.
200 if (receiver_type->Is(Type::Number())) {
Ben Murdochda12d292016-06-02 14:46:10 +0100201 // Join this check with the "receiver is smi" check above.
202 DCHECK_NOT_NULL(receiverissmi_effect);
203 DCHECK_NOT_NULL(receiverissmi_control);
204 this_effects.push_back(receiverissmi_effect);
205 this_controls.push_back(receiverissmi_control);
206 receiverissmi_effect = receiverissmi_control = nullptr;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000207 }
208
209 // Create dominating Merge+EffectPhi for this {receiver} type.
210 int const this_control_count = static_cast<int>(this_controls.size());
211 this_control =
212 (this_control_count == 1)
213 ? this_controls.front()
214 : graph()->NewNode(common()->Merge(this_control_count),
215 this_control_count, &this_controls.front());
216 this_effects.push_back(this_control);
217 int const this_effect_count = static_cast<int>(this_effects.size());
218 this_effect =
219 (this_control_count == 1)
220 ? this_effects.front()
221 : graph()->NewNode(common()->EffectPhi(this_control_count),
222 this_effect_count, &this_effects.front());
223 }
224
225 // Determine actual holder and perform prototype chain checks.
226 Handle<JSObject> holder;
227 if (access_info.holder().ToHandle(&holder)) {
228 AssumePrototypesStable(receiver_type, native_context, holder);
229 }
230
231 // Generate the actual property access.
232 if (access_info.IsNotFound()) {
233 DCHECK_EQ(AccessMode::kLoad, access_mode);
Ben Murdochda12d292016-06-02 14:46:10 +0100234 this_value = jsgraph()->UndefinedConstant();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000235 } else if (access_info.IsDataConstant()) {
236 this_value = jsgraph()->Constant(access_info.constant());
237 if (access_mode == AccessMode::kStore) {
238 Node* check = graph()->NewNode(
239 simplified()->ReferenceEqual(Type::Tagged()), value, this_value);
Ben Murdochda12d292016-06-02 14:46:10 +0100240 this_control = graph()->NewNode(common()->DeoptimizeUnless(), check,
241 frame_state, this_effect, this_control);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000242 }
243 } else {
244 DCHECK(access_info.IsDataField());
245 FieldIndex const field_index = access_info.field_index();
246 FieldCheck const field_check = access_info.field_check();
247 Type* const field_type = access_info.field_type();
248 switch (field_check) {
249 case FieldCheck::kNone:
250 break;
251 case FieldCheck::kJSArrayBufferViewBufferNotNeutered: {
252 Node* this_buffer = this_effect =
253 graph()->NewNode(simplified()->LoadField(
254 AccessBuilder::ForJSArrayBufferViewBuffer()),
255 this_receiver, this_effect, this_control);
256 Node* this_buffer_bit_field = this_effect =
257 graph()->NewNode(simplified()->LoadField(
258 AccessBuilder::ForJSArrayBufferBitField()),
259 this_buffer, this_effect, this_control);
260 Node* check = graph()->NewNode(
261 machine()->Word32Equal(),
262 graph()->NewNode(machine()->Word32And(), this_buffer_bit_field,
263 jsgraph()->Int32Constant(
264 1 << JSArrayBuffer::WasNeutered::kShift)),
265 jsgraph()->Int32Constant(0));
Ben Murdochda12d292016-06-02 14:46:10 +0100266 this_control =
Ben Murdochc5610432016-08-08 18:44:38 +0100267 graph()->NewNode(common()->DeoptimizeUnless(), check, frame_state,
Ben Murdochda12d292016-06-02 14:46:10 +0100268 this_effect, this_control);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000269 break;
270 }
271 }
272 if (access_mode == AccessMode::kLoad &&
273 access_info.holder().ToHandle(&holder)) {
274 this_receiver = jsgraph()->Constant(holder);
275 }
276 Node* this_storage = this_receiver;
277 if (!field_index.is_inobject()) {
278 this_storage = this_effect = graph()->NewNode(
279 simplified()->LoadField(AccessBuilder::ForJSObjectProperties()),
280 this_storage, this_effect, this_control);
281 }
Ben Murdochc5610432016-08-08 18:44:38 +0100282 FieldAccess field_access = {
283 kTaggedBase, field_index.offset(), name,
284 field_type, MachineType::AnyTagged(), kFullWriteBarrier};
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000285 if (access_mode == AccessMode::kLoad) {
286 if (field_type->Is(Type::UntaggedFloat64())) {
287 if (!field_index.is_inobject() || field_index.is_hidden_field() ||
288 !FLAG_unbox_double_fields) {
289 this_storage = this_effect =
290 graph()->NewNode(simplified()->LoadField(field_access),
291 this_storage, this_effect, this_control);
292 field_access.offset = HeapNumber::kValueOffset;
293 field_access.name = MaybeHandle<Name>();
294 }
295 field_access.machine_type = MachineType::Float64();
296 }
297 this_value = this_effect =
298 graph()->NewNode(simplified()->LoadField(field_access),
299 this_storage, this_effect, this_control);
300 } else {
301 DCHECK_EQ(AccessMode::kStore, access_mode);
302 if (field_type->Is(Type::UntaggedFloat64())) {
303 Node* check =
304 graph()->NewNode(simplified()->ObjectIsNumber(), this_value);
Ben Murdochda12d292016-06-02 14:46:10 +0100305 this_control =
306 graph()->NewNode(common()->DeoptimizeUnless(), check, frame_state,
307 this_effect, this_control);
Ben Murdochc5610432016-08-08 18:44:38 +0100308 this_value = graph()->NewNode(simplified()->TypeGuard(Type::Number()),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000309 this_value, this_control);
310
311 if (!field_index.is_inobject() || field_index.is_hidden_field() ||
312 !FLAG_unbox_double_fields) {
313 if (access_info.HasTransitionMap()) {
314 // Allocate a MutableHeapNumber for the new property.
Ben Murdochc5610432016-08-08 18:44:38 +0100315 this_effect =
316 graph()->NewNode(common()->BeginRegion(), this_effect);
317 Node* this_box = this_effect =
318 graph()->NewNode(simplified()->Allocate(NOT_TENURED),
319 jsgraph()->Constant(HeapNumber::kSize),
320 this_effect, this_control);
321 this_effect = graph()->NewNode(
322 simplified()->StoreField(AccessBuilder::ForMap()), this_box,
323 jsgraph()->HeapConstant(factory()->mutable_heap_number_map()),
324 this_effect, this_control);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000325 this_effect = graph()->NewNode(
326 simplified()->StoreField(AccessBuilder::ForHeapNumberValue()),
327 this_box, this_value, this_effect, this_control);
Ben Murdochc5610432016-08-08 18:44:38 +0100328 this_value = this_effect = graph()->NewNode(
329 common()->FinishRegion(), this_box, this_effect);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000330
331 field_access.type = Type::TaggedPointer();
332 } else {
333 // We just store directly to the MutableHeapNumber.
334 this_storage = this_effect =
335 graph()->NewNode(simplified()->LoadField(field_access),
336 this_storage, this_effect, this_control);
337 field_access.offset = HeapNumber::kValueOffset;
338 field_access.name = MaybeHandle<Name>();
339 field_access.machine_type = MachineType::Float64();
340 }
341 } else {
342 // Unboxed double field, we store directly to the field.
343 field_access.machine_type = MachineType::Float64();
344 }
345 } else if (field_type->Is(Type::TaggedSigned())) {
346 Node* check =
347 graph()->NewNode(simplified()->ObjectIsSmi(), this_value);
Ben Murdochda12d292016-06-02 14:46:10 +0100348 this_control =
349 graph()->NewNode(common()->DeoptimizeUnless(), check, frame_state,
350 this_effect, this_control);
Ben Murdochc5610432016-08-08 18:44:38 +0100351 this_value =
352 graph()->NewNode(simplified()->TypeGuard(type_cache_.kSmi),
353 this_value, this_control);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000354 } else if (field_type->Is(Type::TaggedPointer())) {
355 Node* check =
356 graph()->NewNode(simplified()->ObjectIsSmi(), this_value);
Ben Murdochda12d292016-06-02 14:46:10 +0100357 this_control =
358 graph()->NewNode(common()->DeoptimizeIf(), check, frame_state,
359 this_effect, this_control);
360 if (field_type->NumClasses() == 1) {
361 // Emit a map check for the value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000362 Node* this_value_map = this_effect = graph()->NewNode(
363 simplified()->LoadField(AccessBuilder::ForMap()), this_value,
364 this_effect, this_control);
Ben Murdochda12d292016-06-02 14:46:10 +0100365 Node* check = graph()->NewNode(
366 simplified()->ReferenceEqual(Type::Internal()), this_value_map,
367 jsgraph()->Constant(field_type->Classes().Current()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000368 this_control =
Ben Murdochda12d292016-06-02 14:46:10 +0100369 graph()->NewNode(common()->DeoptimizeUnless(), check,
370 frame_state, this_effect, this_control);
371 } else {
372 DCHECK_EQ(0, field_type->NumClasses());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000373 }
374 } else {
375 DCHECK(field_type->Is(Type::Tagged()));
376 }
377 Handle<Map> transition_map;
378 if (access_info.transition_map().ToHandle(&transition_map)) {
379 this_effect = graph()->NewNode(common()->BeginRegion(), this_effect);
380 this_effect = graph()->NewNode(
381 simplified()->StoreField(AccessBuilder::ForMap()), this_receiver,
382 jsgraph()->Constant(transition_map), this_effect, this_control);
383 }
384 this_effect = graph()->NewNode(simplified()->StoreField(field_access),
385 this_storage, this_value, this_effect,
386 this_control);
387 if (access_info.HasTransitionMap()) {
388 this_effect =
389 graph()->NewNode(common()->FinishRegion(),
390 jsgraph()->UndefinedConstant(), this_effect);
391 }
392 }
393 }
394
395 // Remember the final state for this property access.
396 values.push_back(this_value);
397 effects.push_back(this_effect);
398 controls.push_back(this_control);
399 }
400
Ben Murdochda12d292016-06-02 14:46:10 +0100401 DCHECK_NULL(fallthrough_control);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000402
403 // Generate the final merge point for all (polymorphic) branches.
404 int const control_count = static_cast<int>(controls.size());
405 if (control_count == 0) {
406 value = effect = control = jsgraph()->Dead();
407 } else if (control_count == 1) {
408 value = values.front();
409 effect = effects.front();
410 control = controls.front();
411 } else {
412 control = graph()->NewNode(common()->Merge(control_count), control_count,
413 &controls.front());
414 values.push_back(control);
415 value = graph()->NewNode(
416 common()->Phi(MachineRepresentation::kTagged, control_count),
417 control_count + 1, &values.front());
418 effects.push_back(control);
419 effect = graph()->NewNode(common()->EffectPhi(control_count),
420 control_count + 1, &effects.front());
421 }
422 ReplaceWithValue(node, value, effect, control);
423 return Replace(value);
424}
425
426
Ben Murdoch097c5b22016-05-18 11:27:45 +0100427Reduction JSNativeContextSpecialization::ReduceNamedAccess(
428 Node* node, Node* value, FeedbackNexus const& nexus, Handle<Name> name,
429 AccessMode access_mode, LanguageMode language_mode) {
430 DCHECK(node->opcode() == IrOpcode::kJSLoadNamed ||
431 node->opcode() == IrOpcode::kJSStoreNamed);
Ben Murdochc5610432016-08-08 18:44:38 +0100432 Node* const receiver = NodeProperties::GetValueInput(node, 0);
433 Node* const effect = NodeProperties::GetEffectInput(node);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100434
435 // Check if the {nexus} reports type feedback for the IC.
436 if (nexus.IsUninitialized()) {
437 if ((flags() & kDeoptimizationEnabled) &&
438 (flags() & kBailoutOnUninitialized)) {
Ben Murdochc5610432016-08-08 18:44:38 +0100439 return ReduceSoftDeoptimize(node);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100440 }
441 return NoChange();
442 }
443
444 // Extract receiver maps from the IC using the {nexus}.
445 MapHandleList receiver_maps;
Ben Murdochc5610432016-08-08 18:44:38 +0100446 if (!ExtractReceiverMaps(receiver, effect, nexus, &receiver_maps)) {
447 return NoChange();
448 } else if (receiver_maps.length() == 0) {
449 if ((flags() & kDeoptimizationEnabled) &&
450 (flags() & kBailoutOnUninitialized)) {
451 return ReduceSoftDeoptimize(node);
452 }
453 return NoChange();
454 }
Ben Murdoch097c5b22016-05-18 11:27:45 +0100455
456 // Try to lower the named access based on the {receiver_maps}.
457 return ReduceNamedAccess(node, value, receiver_maps, name, access_mode,
458 language_mode);
459}
460
461
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000462Reduction JSNativeContextSpecialization::ReduceJSLoadNamed(Node* node) {
463 DCHECK_EQ(IrOpcode::kJSLoadNamed, node->opcode());
464 NamedAccess const& p = NamedAccessOf(node->op());
Ben Murdochc5610432016-08-08 18:44:38 +0100465 Node* const receiver = NodeProperties::GetValueInput(node, 0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000466 Node* const value = jsgraph()->Dead();
467
Ben Murdochc5610432016-08-08 18:44:38 +0100468 // Check if we have a constant receiver.
469 HeapObjectMatcher m(receiver);
470 if (m.HasValue()) {
471 // Optimize "prototype" property of functions.
472 if (m.Value()->IsJSFunction() &&
473 p.name().is_identical_to(factory()->prototype_string())) {
474 Handle<JSFunction> function = Handle<JSFunction>::cast(m.Value());
475 if (function->has_initial_map()) {
476 // We need to add a code dependency on the initial map of the
477 // {function} in order to be notified about changes to the
478 // "prototype" of {function}, so it doesn't make sense to
479 // continue unless deoptimization is enabled.
480 if (flags() & kDeoptimizationEnabled) {
481 Handle<Map> initial_map(function->initial_map(), isolate());
482 dependencies()->AssumeInitialMapCantChange(initial_map);
483 Handle<Object> prototype(initial_map->prototype(), isolate());
484 Node* value = jsgraph()->Constant(prototype);
485 ReplaceWithValue(node, value);
486 return Replace(value);
487 }
488 }
489 }
490 }
491
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000492 // Extract receiver maps from the LOAD_IC using the LoadICNexus.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000493 if (!p.feedback().IsValid()) return NoChange();
494 LoadICNexus nexus(p.feedback().vector(), p.feedback().slot());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000495
496 // Try to lower the named access based on the {receiver_maps}.
Ben Murdoch097c5b22016-05-18 11:27:45 +0100497 return ReduceNamedAccess(node, value, nexus, p.name(), AccessMode::kLoad,
498 p.language_mode());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000499}
500
501
502Reduction JSNativeContextSpecialization::ReduceJSStoreNamed(Node* node) {
503 DCHECK_EQ(IrOpcode::kJSStoreNamed, node->opcode());
504 NamedAccess const& p = NamedAccessOf(node->op());
505 Node* const value = NodeProperties::GetValueInput(node, 1);
506
507 // Extract receiver maps from the STORE_IC using the StoreICNexus.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000508 if (!p.feedback().IsValid()) return NoChange();
509 StoreICNexus nexus(p.feedback().vector(), p.feedback().slot());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000510
511 // Try to lower the named access based on the {receiver_maps}.
Ben Murdoch097c5b22016-05-18 11:27:45 +0100512 return ReduceNamedAccess(node, value, nexus, p.name(), AccessMode::kStore,
513 p.language_mode());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000514}
515
516
517Reduction JSNativeContextSpecialization::ReduceElementAccess(
518 Node* node, Node* index, Node* value, MapHandleList const& receiver_maps,
519 AccessMode access_mode, LanguageMode language_mode,
520 KeyedAccessStoreMode store_mode) {
521 DCHECK(node->opcode() == IrOpcode::kJSLoadProperty ||
522 node->opcode() == IrOpcode::kJSStoreProperty);
523 Node* receiver = NodeProperties::GetValueInput(node, 0);
524 Node* context = NodeProperties::GetContextInput(node);
525 Node* frame_state = NodeProperties::GetFrameStateInput(node, 1);
526 Node* effect = NodeProperties::GetEffectInput(node);
527 Node* control = NodeProperties::GetControlInput(node);
528
529 // Not much we can do if deoptimization support is disabled.
530 if (!(flags() & kDeoptimizationEnabled)) return NoChange();
531
532 // TODO(bmeurer): Add support for non-standard stores.
533 if (store_mode != STANDARD_STORE) return NoChange();
534
535 // Retrieve the native context from the given {node}.
536 Handle<Context> native_context;
537 if (!GetNativeContext(node).ToHandle(&native_context)) return NoChange();
538
539 // Compute element access infos for the receiver maps.
540 AccessInfoFactory access_info_factory(dependencies(), native_context,
541 graph()->zone());
542 ZoneVector<ElementAccessInfo> access_infos(zone());
543 if (!access_info_factory.ComputeElementAccessInfos(receiver_maps, access_mode,
544 &access_infos)) {
545 return NoChange();
546 }
547
548 // Nothing to do if we have no non-deprecated maps.
549 if (access_infos.empty()) return NoChange();
550
551 // The final states for every polymorphic branch. We join them with
552 // Merge+Phi+EffectPhi at the bottom.
553 ZoneVector<Node*> values(zone());
554 ZoneVector<Node*> effects(zone());
555 ZoneVector<Node*> controls(zone());
556
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000557 // Ensure that {receiver} is a heap object.
558 Node* check = graph()->NewNode(simplified()->ObjectIsSmi(), receiver);
Ben Murdochda12d292016-06-02 14:46:10 +0100559 control = graph()->NewNode(common()->DeoptimizeIf(), check, frame_state,
560 effect, control);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000561
562 // Load the {receiver} map. The resulting effect is the dominating effect for
563 // all (polymorphic) branches.
564 Node* receiver_map = effect =
565 graph()->NewNode(simplified()->LoadField(AccessBuilder::ForMap()),
566 receiver, effect, control);
567
568 // Generate code for the various different element access patterns.
569 Node* fallthrough_control = control;
Ben Murdochda12d292016-06-02 14:46:10 +0100570 for (size_t j = 0; j < access_infos.size(); ++j) {
571 ElementAccessInfo const& access_info = access_infos[j];
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000572 Node* this_receiver = receiver;
573 Node* this_value = value;
574 Node* this_index = index;
575 Node* this_effect;
576 Node* this_control;
577
578 // Perform map check on {receiver}.
579 Type* receiver_type = access_info.receiver_type();
580 bool receiver_is_jsarray = true;
581 {
582 ZoneVector<Node*> this_controls(zone());
583 ZoneVector<Node*> this_effects(zone());
Ben Murdochda12d292016-06-02 14:46:10 +0100584 size_t num_transitions = access_info.transitions().size();
585 int num_classes = access_info.receiver_type()->NumClasses();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000586 for (auto i = access_info.receiver_type()->Classes(); !i.Done();
587 i.Advance()) {
Ben Murdochda12d292016-06-02 14:46:10 +0100588 DCHECK_LT(0, num_classes);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000589 Handle<Map> map = i.Current();
590 Node* check =
591 graph()->NewNode(simplified()->ReferenceEqual(Type::Any()),
592 receiver_map, jsgraph()->Constant(map));
Ben Murdochda12d292016-06-02 14:46:10 +0100593 if (--num_classes == 0 && num_transitions == 0 &&
594 j == access_infos.size() - 1) {
595 // Last map check on the fallthrough control path, do a conditional
596 // eager deoptimization exit here.
597 // TODO(turbofan): This is ugly as hell! We should probably introduce
598 // macro-ish operators for property access that encapsulate this whole
599 // mess.
600 this_controls.push_back(graph()->NewNode(common()->DeoptimizeUnless(),
601 check, frame_state, effect,
602 fallthrough_control));
603 fallthrough_control = nullptr;
604 } else {
605 Node* branch =
606 graph()->NewNode(common()->Branch(), check, fallthrough_control);
607 this_controls.push_back(graph()->NewNode(common()->IfTrue(), branch));
608 fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
609 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000610 this_effects.push_back(effect);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000611 if (!map->IsJSArrayMap()) receiver_is_jsarray = false;
612 }
613
614 // Generate possible elements kind transitions.
615 for (auto transition : access_info.transitions()) {
Ben Murdochda12d292016-06-02 14:46:10 +0100616 DCHECK_LT(0u, num_transitions);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000617 Handle<Map> transition_source = transition.first;
618 Handle<Map> transition_target = transition.second;
Ben Murdochda12d292016-06-02 14:46:10 +0100619 Node* transition_control;
620 Node* transition_effect = effect;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000621
622 // Check if {receiver} has the specified {transition_source} map.
623 Node* check = graph()->NewNode(
624 simplified()->ReferenceEqual(Type::Any()), receiver_map,
625 jsgraph()->HeapConstant(transition_source));
Ben Murdochda12d292016-06-02 14:46:10 +0100626 if (--num_transitions == 0 && j == access_infos.size() - 1) {
627 transition_control =
628 graph()->NewNode(common()->DeoptimizeUnless(), check, frame_state,
629 transition_effect, fallthrough_control);
630 fallthrough_control = nullptr;
631 } else {
632 Node* branch =
633 graph()->NewNode(common()->Branch(), check, fallthrough_control);
634 fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
635 transition_control = graph()->NewNode(common()->IfTrue(), branch);
636 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000637
638 // Migrate {receiver} from {transition_source} to {transition_target}.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000639 if (IsSimpleMapChangeTransition(transition_source->elements_kind(),
640 transition_target->elements_kind())) {
641 // In-place migration, just store the {transition_target} map.
642 transition_effect = graph()->NewNode(
643 simplified()->StoreField(AccessBuilder::ForMap()), receiver,
644 jsgraph()->HeapConstant(transition_target), transition_effect,
645 transition_control);
646 } else {
647 // Instance migration, let the stub deal with the {receiver}.
648 TransitionElementsKindStub stub(isolate(),
649 transition_source->elements_kind(),
650 transition_target->elements_kind(),
651 transition_source->IsJSArrayMap());
652 CallDescriptor const* const desc = Linkage::GetStubCallDescriptor(
653 isolate(), graph()->zone(), stub.GetCallInterfaceDescriptor(), 0,
654 CallDescriptor::kNeedsFrameState, node->op()->properties());
655 transition_effect = graph()->NewNode(
656 common()->Call(desc), jsgraph()->HeapConstant(stub.GetCode()),
657 receiver, jsgraph()->HeapConstant(transition_target), context,
658 frame_state, transition_effect, transition_control);
659 }
660 this_controls.push_back(transition_control);
661 this_effects.push_back(transition_effect);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000662 }
663
664 // Create single chokepoint for the control.
665 int const this_control_count = static_cast<int>(this_controls.size());
666 if (this_control_count == 1) {
667 this_control = this_controls.front();
668 this_effect = this_effects.front();
669 } else {
670 this_control =
671 graph()->NewNode(common()->Merge(this_control_count),
672 this_control_count, &this_controls.front());
673 this_effects.push_back(this_control);
674 this_effect =
675 graph()->NewNode(common()->EffectPhi(this_control_count),
676 this_control_count + 1, &this_effects.front());
677 }
678 }
679
680 // Certain stores need a prototype chain check because shape changes
681 // could allow callbacks on elements in the prototype chain that are
682 // not compatible with (monomorphic) keyed stores.
683 Handle<JSObject> holder;
684 if (access_info.holder().ToHandle(&holder)) {
685 AssumePrototypesStable(receiver_type, native_context, holder);
686 }
687
688 // Check that the {index} is actually a Number.
689 if (!NumberMatcher(this_index).HasValue()) {
690 Node* check =
691 graph()->NewNode(simplified()->ObjectIsNumber(), this_index);
Ben Murdochda12d292016-06-02 14:46:10 +0100692 this_control = graph()->NewNode(common()->DeoptimizeUnless(), check,
693 frame_state, this_effect, this_control);
Ben Murdochc5610432016-08-08 18:44:38 +0100694 this_index = graph()->NewNode(simplified()->TypeGuard(Type::Number()),
695 this_index, this_control);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000696 }
697
698 // Convert the {index} to an unsigned32 value and check if the result is
699 // equal to the original {index}.
700 if (!NumberMatcher(this_index).IsInRange(0.0, kMaxUInt32)) {
701 Node* this_index32 =
702 graph()->NewNode(simplified()->NumberToUint32(), this_index);
703 Node* check = graph()->NewNode(simplified()->NumberEqual(), this_index32,
704 this_index);
Ben Murdochda12d292016-06-02 14:46:10 +0100705 this_control = graph()->NewNode(common()->DeoptimizeUnless(), check,
706 frame_state, this_effect, this_control);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000707 this_index = this_index32;
708 }
709
710 // TODO(bmeurer): We currently specialize based on elements kind. We should
711 // also be able to properly support strings and other JSObjects here.
712 ElementsKind elements_kind = access_info.elements_kind();
713
714 // Load the elements for the {receiver}.
715 Node* this_elements = this_effect = graph()->NewNode(
716 simplified()->LoadField(AccessBuilder::ForJSObjectElements()),
717 this_receiver, this_effect, this_control);
718
719 // Don't try to store to a copy-on-write backing store.
720 if (access_mode == AccessMode::kStore &&
721 IsFastSmiOrObjectElementsKind(elements_kind)) {
722 Node* this_elements_map = this_effect =
723 graph()->NewNode(simplified()->LoadField(AccessBuilder::ForMap()),
724 this_elements, this_effect, this_control);
Ben Murdochda12d292016-06-02 14:46:10 +0100725 Node* check = graph()->NewNode(
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000726 simplified()->ReferenceEqual(Type::Any()), this_elements_map,
727 jsgraph()->HeapConstant(factory()->fixed_array_map()));
Ben Murdochda12d292016-06-02 14:46:10 +0100728 this_control = graph()->NewNode(common()->DeoptimizeUnless(), check,
729 frame_state, this_effect, this_control);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000730 }
731
732 // Load the length of the {receiver}.
733 Node* this_length = this_effect =
734 receiver_is_jsarray
735 ? graph()->NewNode(
736 simplified()->LoadField(
737 AccessBuilder::ForJSArrayLength(elements_kind)),
738 this_receiver, this_effect, this_control)
739 : graph()->NewNode(
740 simplified()->LoadField(AccessBuilder::ForFixedArrayLength()),
741 this_elements, this_effect, this_control);
742
743 // Check that the {index} is in the valid range for the {receiver}.
744 Node* check = graph()->NewNode(simplified()->NumberLessThan(), this_index,
745 this_length);
Ben Murdochda12d292016-06-02 14:46:10 +0100746 this_control = graph()->NewNode(common()->DeoptimizeUnless(), check,
747 frame_state, this_effect, this_control);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000748
749 // Compute the element access.
750 Type* element_type = Type::Any();
751 MachineType element_machine_type = MachineType::AnyTagged();
752 if (IsFastDoubleElementsKind(elements_kind)) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100753 element_type = Type::Number();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000754 element_machine_type = MachineType::Float64();
755 } else if (IsFastSmiElementsKind(elements_kind)) {
756 element_type = type_cache_.kSmi;
757 }
758 ElementAccess element_access = {kTaggedBase, FixedArray::kHeaderSize,
Ben Murdochc5610432016-08-08 18:44:38 +0100759 element_type, element_machine_type,
760 kFullWriteBarrier};
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000761
762 // Access the actual element.
763 // TODO(bmeurer): Refactor this into separate methods or even a separate
764 // class that deals with the elements access.
765 if (access_mode == AccessMode::kLoad) {
766 // Compute the real element access type, which includes the hole in case
767 // of holey backing stores.
768 if (elements_kind == FAST_HOLEY_ELEMENTS ||
769 elements_kind == FAST_HOLEY_SMI_ELEMENTS) {
770 element_access.type = Type::Union(
771 element_type,
772 Type::Constant(factory()->the_hole_value(), graph()->zone()),
773 graph()->zone());
774 }
775 // Perform the actual backing store access.
776 this_value = this_effect = graph()->NewNode(
777 simplified()->LoadElement(element_access), this_elements, this_index,
778 this_effect, this_control);
779 // Handle loading from holey backing stores correctly, by either mapping
780 // the hole to undefined if possible, or deoptimizing otherwise.
781 if (elements_kind == FAST_HOLEY_ELEMENTS ||
782 elements_kind == FAST_HOLEY_SMI_ELEMENTS) {
783 // Perform the hole check on the result.
784 Node* check =
785 graph()->NewNode(simplified()->ReferenceEqual(element_access.type),
786 this_value, jsgraph()->TheHoleConstant());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000787 // Check if we are allowed to turn the hole into undefined.
788 Type* initial_holey_array_type = Type::Class(
789 handle(isolate()->get_initial_js_array_map(elements_kind)),
790 graph()->zone());
791 if (receiver_type->NowIs(initial_holey_array_type) &&
792 isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
Ben Murdochda12d292016-06-02 14:46:10 +0100793 Node* branch = graph()->NewNode(common()->Branch(BranchHint::kFalse),
794 check, this_control);
795 Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
796 Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000797 // Add a code dependency on the array protector cell.
798 AssumePrototypesStable(receiver_type, native_context,
799 isolate()->initial_object_prototype());
800 dependencies()->AssumePropertyCell(factory()->array_protector());
801 // Turn the hole into undefined.
802 this_control =
803 graph()->NewNode(common()->Merge(2), if_true, if_false);
804 this_value = graph()->NewNode(
805 common()->Phi(MachineRepresentation::kTagged, 2),
806 jsgraph()->UndefinedConstant(), this_value, this_control);
807 element_type =
808 Type::Union(element_type, Type::Undefined(), graph()->zone());
809 } else {
810 // Deoptimize in case of the hole.
Ben Murdochda12d292016-06-02 14:46:10 +0100811 this_control =
812 graph()->NewNode(common()->DeoptimizeIf(), check, frame_state,
813 this_effect, this_control);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000814 }
815 // Rename the result to represent the actual type (not polluted by the
816 // hole).
Ben Murdochc5610432016-08-08 18:44:38 +0100817 this_value = graph()->NewNode(simplified()->TypeGuard(element_type),
818 this_value, this_control);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000819 } else if (elements_kind == FAST_HOLEY_DOUBLE_ELEMENTS) {
820 // Perform the hole check on the result.
821 Node* check =
822 graph()->NewNode(simplified()->NumberIsHoleNaN(), this_value);
823 // Check if we are allowed to return the hole directly.
824 Type* initial_holey_array_type = Type::Class(
825 handle(isolate()->get_initial_js_array_map(elements_kind)),
826 graph()->zone());
827 if (receiver_type->NowIs(initial_holey_array_type) &&
828 isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
829 // Add a code dependency on the array protector cell.
830 AssumePrototypesStable(receiver_type, native_context,
831 isolate()->initial_object_prototype());
832 dependencies()->AssumePropertyCell(factory()->array_protector());
833 // Turn the hole into undefined.
834 this_value = graph()->NewNode(
835 common()->Select(MachineRepresentation::kTagged,
836 BranchHint::kFalse),
837 check, jsgraph()->UndefinedConstant(), this_value);
838 } else {
839 // Deoptimize in case of the hole.
Ben Murdochda12d292016-06-02 14:46:10 +0100840 this_control =
841 graph()->NewNode(common()->DeoptimizeIf(), check, frame_state,
842 this_effect, this_control);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000843 }
844 }
845 } else {
846 DCHECK_EQ(AccessMode::kStore, access_mode);
847 if (IsFastSmiElementsKind(elements_kind)) {
848 Node* check = graph()->NewNode(simplified()->ObjectIsSmi(), this_value);
Ben Murdochda12d292016-06-02 14:46:10 +0100849 this_control = graph()->NewNode(common()->DeoptimizeUnless(), check,
850 frame_state, this_effect, this_control);
Ben Murdochc5610432016-08-08 18:44:38 +0100851 this_value = graph()->NewNode(simplified()->TypeGuard(type_cache_.kSmi),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000852 this_value, this_control);
853 } else if (IsFastDoubleElementsKind(elements_kind)) {
854 Node* check =
855 graph()->NewNode(simplified()->ObjectIsNumber(), this_value);
Ben Murdochda12d292016-06-02 14:46:10 +0100856 this_control = graph()->NewNode(common()->DeoptimizeUnless(), check,
857 frame_state, this_effect, this_control);
Ben Murdochc5610432016-08-08 18:44:38 +0100858 this_value = graph()->NewNode(simplified()->TypeGuard(Type::Number()),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000859 this_value, this_control);
860 }
861 this_effect = graph()->NewNode(simplified()->StoreElement(element_access),
862 this_elements, this_index, this_value,
863 this_effect, this_control);
864 }
865
866 // Remember the final state for this element access.
867 values.push_back(this_value);
868 effects.push_back(this_effect);
869 controls.push_back(this_control);
870 }
871
Ben Murdochda12d292016-06-02 14:46:10 +0100872 DCHECK_NULL(fallthrough_control);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000873
874 // Generate the final merge point for all (polymorphic) branches.
875 int const control_count = static_cast<int>(controls.size());
876 if (control_count == 0) {
877 value = effect = control = jsgraph()->Dead();
878 } else if (control_count == 1) {
879 value = values.front();
880 effect = effects.front();
881 control = controls.front();
882 } else {
883 control = graph()->NewNode(common()->Merge(control_count), control_count,
884 &controls.front());
885 values.push_back(control);
886 value = graph()->NewNode(
887 common()->Phi(MachineRepresentation::kTagged, control_count),
888 control_count + 1, &values.front());
889 effects.push_back(control);
890 effect = graph()->NewNode(common()->EffectPhi(control_count),
891 control_count + 1, &effects.front());
892 }
893 ReplaceWithValue(node, value, effect, control);
894 return Replace(value);
895}
896
897
// Shared lowering for keyed property accesses (JSLoadProperty and
// JSStoreProperty). Extracts receiver map feedback from the {nexus} and
// dispatches to named-access lowering (when the key is a constant
// primitive name, or when the IC recorded a name) or otherwise to
// element-access lowering. {value} is the stored value for stores and a
// dummy (Dead) node for loads; {store_mode} is only meaningful for stores.
Reduction JSNativeContextSpecialization::ReduceKeyedAccess(
    Node* node, Node* index, Node* value, FeedbackNexus const& nexus,
    AccessMode access_mode, LanguageMode language_mode,
    KeyedAccessStoreMode store_mode) {
  DCHECK(node->opcode() == IrOpcode::kJSLoadProperty ||
         node->opcode() == IrOpcode::kJSStoreProperty);
  Node* const receiver = NodeProperties::GetValueInput(node, 0);
  Node* const effect = NodeProperties::GetEffectInput(node);

  // Check if the {nexus} reports type feedback for the IC.
  if (nexus.IsUninitialized()) {
    // The access was never executed; optionally replace it with a soft
    // deoptimization exit when both flags allow it.
    if ((flags() & kDeoptimizationEnabled) &&
        (flags() & kBailoutOnUninitialized)) {
      return ReduceSoftDeoptimize(node);
    }
    return NoChange();
  }

  // Extract receiver maps from the {nexus}.
  MapHandleList receiver_maps;
  if (!ExtractReceiverMaps(receiver, effect, nexus, &receiver_maps)) {
    return NoChange();
  } else if (receiver_maps.length() == 0) {
    // All recorded maps were filtered out as impossible candidates; treat
    // this like an uninitialized IC.
    if ((flags() & kDeoptimizationEnabled) &&
        (flags() & kBailoutOnUninitialized)) {
      return ReduceSoftDeoptimize(node);
    }
    return NoChange();
  }

  // Optimize access for constant {index}.
  HeapObjectMatcher mindex(index);
  if (mindex.HasValue() && mindex.Value()->IsPrimitive()) {
    // Keyed access requires a ToPropertyKey on the {index} first before
    // looking up the property on the object (see ES6 section 12.3.2.1).
    // We can only do this for non-observable ToPropertyKey invocations,
    // so we limit the constant indices to primitives at this point.
    Handle<Name> name;
    if (Object::ToName(isolate(), mindex.Value()).ToHandle(&name)) {
      uint32_t array_index;
      if (name->AsArrayIndex(&array_index)) {
        // Use the constant array index.
        index = jsgraph()->Constant(static_cast<double>(array_index));
      } else {
        // Non-index name: lower as a named access instead.
        name = factory()->InternalizeName(name);
        return ReduceNamedAccess(node, value, receiver_maps, name, access_mode,
                                 language_mode);
      }
    }
  }

  // Check if we have feedback for a named access.
  if (Name* name = nexus.FindFirstName()) {
    return ReduceNamedAccess(node, value, receiver_maps,
                             handle(name, isolate()), access_mode,
                             language_mode, index);
  }

  // Try to lower the element access based on the {receiver_maps}.
  return ReduceElementAccess(node, index, value, receiver_maps, access_mode,
                             language_mode, store_mode);
}
960
961
Ben Murdoch097c5b22016-05-18 11:27:45 +0100962Reduction JSNativeContextSpecialization::ReduceSoftDeoptimize(Node* node) {
963 Node* frame_state = NodeProperties::GetFrameStateInput(node, 1);
964 Node* effect = NodeProperties::GetEffectInput(node);
965 Node* control = NodeProperties::GetControlInput(node);
966 Node* deoptimize =
967 graph()->NewNode(common()->Deoptimize(DeoptimizeKind::kSoft), frame_state,
968 effect, control);
969 // TODO(bmeurer): This should be on the AdvancedReducer somehow.
970 NodeProperties::MergeControlToEnd(graph(), common(), deoptimize);
971 Revisit(graph()->end());
972 node->TrimInputCount(0);
973 NodeProperties::ChangeOp(node, common()->Dead());
974 return Changed(node);
975}
976
977
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000978Reduction JSNativeContextSpecialization::ReduceJSLoadProperty(Node* node) {
979 DCHECK_EQ(IrOpcode::kJSLoadProperty, node->opcode());
980 PropertyAccess const& p = PropertyAccessOf(node->op());
981 Node* const index = NodeProperties::GetValueInput(node, 1);
982 Node* const value = jsgraph()->Dead();
983
984 // Extract receiver maps from the KEYED_LOAD_IC using the KeyedLoadICNexus.
985 if (!p.feedback().IsValid()) return NoChange();
986 KeyedLoadICNexus nexus(p.feedback().vector(), p.feedback().slot());
987
988 // Try to lower the keyed access based on the {nexus}.
989 return ReduceKeyedAccess(node, index, value, nexus, AccessMode::kLoad,
990 p.language_mode(), STANDARD_STORE);
991}
992
993
994Reduction JSNativeContextSpecialization::ReduceJSStoreProperty(Node* node) {
995 DCHECK_EQ(IrOpcode::kJSStoreProperty, node->opcode());
996 PropertyAccess const& p = PropertyAccessOf(node->op());
997 Node* const index = NodeProperties::GetValueInput(node, 1);
998 Node* const value = NodeProperties::GetValueInput(node, 2);
999
1000 // Extract receiver maps from the KEYED_STORE_IC using the KeyedStoreICNexus.
1001 if (!p.feedback().IsValid()) return NoChange();
1002 KeyedStoreICNexus nexus(p.feedback().vector(), p.feedback().slot());
1003
1004 // Extract the keyed access store mode from the KEYED_STORE_IC.
1005 KeyedAccessStoreMode store_mode = nexus.GetKeyedAccessStoreMode();
1006
1007 // Try to lower the keyed access based on the {nexus}.
1008 return ReduceKeyedAccess(node, index, value, nexus, AccessMode::kStore,
1009 p.language_mode(), store_mode);
1010}
1011
1012
1013void JSNativeContextSpecialization::AssumePrototypesStable(
1014 Type* receiver_type, Handle<Context> native_context,
1015 Handle<JSObject> holder) {
1016 // Determine actual holder and perform prototype chain checks.
1017 for (auto i = receiver_type->Classes(); !i.Done(); i.Advance()) {
1018 Handle<Map> map = i.Current();
1019 // Perform the implicit ToObject for primitives here.
1020 // Implemented according to ES6 section 7.3.2 GetV (V, P).
1021 Handle<JSFunction> constructor;
1022 if (Map::GetConstructorFunction(map, native_context)
1023 .ToHandle(&constructor)) {
1024 map = handle(constructor->initial_map(), isolate());
1025 }
1026 dependencies()->AssumePrototypeMapsStable(map, holder);
1027 }
1028}
1029
Ben Murdochc5610432016-08-08 18:44:38 +01001030bool JSNativeContextSpecialization::ExtractReceiverMaps(
1031 Node* receiver, Node* effect, FeedbackNexus const& nexus,
1032 MapHandleList* receiver_maps) {
1033 DCHECK_EQ(0, receiver_maps->length());
1034 // See if we can infer a concrete type for the {receiver}.
1035 Handle<Map> receiver_map;
1036 if (InferReceiverMap(receiver, effect).ToHandle(&receiver_map)) {
1037 // We can assume that the {receiver} still has the infered {receiver_map}.
1038 receiver_maps->Add(receiver_map);
1039 return true;
1040 }
1041 // Try to extract some maps from the {nexus}.
1042 if (nexus.ExtractMaps(receiver_maps) != 0) {
1043 // Try to filter impossible candidates based on infered root map.
1044 if (InferReceiverRootMap(receiver).ToHandle(&receiver_map)) {
1045 for (int i = receiver_maps->length(); --i >= 0;) {
1046 if (receiver_maps->at(i)->FindRootMap() != *receiver_map) {
1047 receiver_maps->Remove(i);
1048 }
1049 }
1050 }
1051 return true;
1052 }
1053 return false;
1054}
1055
1056MaybeHandle<Map> JSNativeContextSpecialization::InferReceiverMap(Node* receiver,
1057 Node* effect) {
1058 NodeMatcher m(receiver);
1059 if (m.IsJSCreate()) {
1060 HeapObjectMatcher mtarget(m.InputAt(0));
1061 HeapObjectMatcher mnewtarget(m.InputAt(1));
1062 if (mtarget.HasValue() && mnewtarget.HasValue()) {
1063 Handle<JSFunction> constructor =
1064 Handle<JSFunction>::cast(mtarget.Value());
1065 if (constructor->has_initial_map()) {
1066 Handle<Map> initial_map(constructor->initial_map(), isolate());
1067 if (initial_map->constructor_or_backpointer() == *mnewtarget.Value()) {
1068 // Walk up the {effect} chain to see if the {receiver} is the
1069 // dominating effect and there's no other observable write in
1070 // between.
1071 while (true) {
1072 if (receiver == effect) return initial_map;
1073 if (!effect->op()->HasProperty(Operator::kNoWrite) ||
1074 effect->op()->EffectInputCount() != 1) {
1075 break;
1076 }
1077 effect = NodeProperties::GetEffectInput(effect);
1078 }
1079 }
1080 }
1081 }
1082 }
1083 return MaybeHandle<Map>();
1084}
1085
1086MaybeHandle<Map> JSNativeContextSpecialization::InferReceiverRootMap(
1087 Node* receiver) {
1088 HeapObjectMatcher m(receiver);
1089 if (m.HasValue()) {
1090 return handle(m.Value()->map()->FindRootMap(), isolate());
1091 } else if (m.IsJSCreate()) {
1092 HeapObjectMatcher mtarget(m.InputAt(0));
1093 HeapObjectMatcher mnewtarget(m.InputAt(1));
1094 if (mtarget.HasValue() && mnewtarget.HasValue()) {
1095 Handle<JSFunction> constructor =
1096 Handle<JSFunction>::cast(mtarget.Value());
1097 if (constructor->has_initial_map()) {
1098 Handle<Map> initial_map(constructor->initial_map(), isolate());
1099 if (initial_map->constructor_or_backpointer() == *mnewtarget.Value()) {
1100 DCHECK_EQ(*initial_map, initial_map->FindRootMap());
1101 return initial_map;
1102 }
1103 }
1104 }
1105 }
1106 return MaybeHandle<Map>();
1107}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001108
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001109MaybeHandle<Context> JSNativeContextSpecialization::GetNativeContext(
1110 Node* node) {
1111 Node* const context = NodeProperties::GetContextInput(node);
1112 return NodeProperties::GetSpecializationNativeContext(context,
1113 native_context());
1114}
1115
1116
1117Graph* JSNativeContextSpecialization::graph() const {
1118 return jsgraph()->graph();
1119}
1120
1121
1122Isolate* JSNativeContextSpecialization::isolate() const {
1123 return jsgraph()->isolate();
1124}
1125
1126
1127Factory* JSNativeContextSpecialization::factory() const {
1128 return isolate()->factory();
1129}
1130
1131
1132MachineOperatorBuilder* JSNativeContextSpecialization::machine() const {
1133 return jsgraph()->machine();
1134}
1135
1136
1137CommonOperatorBuilder* JSNativeContextSpecialization::common() const {
1138 return jsgraph()->common();
1139}
1140
1141
1142JSOperatorBuilder* JSNativeContextSpecialization::javascript() const {
1143 return jsgraph()->javascript();
1144}
1145
1146
1147SimplifiedOperatorBuilder* JSNativeContextSpecialization::simplified() const {
1148 return jsgraph()->simplified();
1149}
1150
1151} // namespace compiler
1152} // namespace internal
1153} // namespace v8