// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/js-native-context-specialization.h"

#include "src/accessors.h"
#include "src/code-factory.h"
#include "src/compilation-dependencies.h"
#include "src/compiler/access-builder.h"
#include "src/compiler/access-info.h"
#include "src/compiler/js-graph.h"
#include "src/compiler/js-operator.h"
#include "src/compiler/linkage.h"
#include "src/compiler/node-matchers.h"
#include "src/field-index-inl.h"
#include "src/isolate-inl.h"
#include "src/objects-inl.h"  // TODO(mstarzinger): Temporary cycle breaker!
#include "src/type-cache.h"
#include "src/type-feedback-vector.h"

namespace v8 {
namespace internal {
namespace compiler {
26JSNativeContextSpecialization::JSNativeContextSpecialization(
27 Editor* editor, JSGraph* jsgraph, Flags flags,
28 MaybeHandle<Context> native_context, CompilationDependencies* dependencies,
29 Zone* zone)
30 : AdvancedReducer(editor),
31 jsgraph_(jsgraph),
32 flags_(flags),
33 native_context_(native_context),
34 dependencies_(dependencies),
35 zone_(zone),
36 type_cache_(TypeCache::Get()) {}
39Reduction JSNativeContextSpecialization::Reduce(Node* node) {
40 switch (node->opcode()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +010041 case IrOpcode::kJSLoadContext:
42 return ReduceJSLoadContext(node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000043 case IrOpcode::kJSLoadNamed:
44 return ReduceJSLoadNamed(node);
45 case IrOpcode::kJSStoreNamed:
46 return ReduceJSStoreNamed(node);
47 case IrOpcode::kJSLoadProperty:
48 return ReduceJSLoadProperty(node);
49 case IrOpcode::kJSStoreProperty:
50 return ReduceJSStoreProperty(node);
51 default:
52 break;
53 }
54 return NoChange();
55}
Ben Murdoch097c5b22016-05-18 11:27:45 +010057Reduction JSNativeContextSpecialization::ReduceJSLoadContext(Node* node) {
58 DCHECK_EQ(IrOpcode::kJSLoadContext, node->opcode());
59 ContextAccess const& access = ContextAccessOf(node->op());
60 Handle<Context> native_context;
61 // Specialize JSLoadContext(NATIVE_CONTEXT_INDEX) to the known native
62 // context (if any), so we can constant-fold those fields, which is
63 // safe, since the NATIVE_CONTEXT_INDEX slot is always immutable.
64 if (access.index() == Context::NATIVE_CONTEXT_INDEX &&
65 GetNativeContext(node).ToHandle(&native_context)) {
66 Node* value = jsgraph()->HeapConstant(native_context);
67 ReplaceWithValue(node, value);
68 return Replace(value);
69 }
70 return NoChange();
71}
// Lowers a (possibly polymorphic) named property access based on the given
// {receiver_maps}. For every feasible PropertyAccessInfo a guarded branch is
// emitted (map/instance-type checks, deoptimizing on the last one), the
// access itself is generated inline (constant, not-found, or data field
// load/store), and all branches are re-joined with Merge/Phi/EffectPhi.
// {index} is only passed for keyed accesses with a known name and is then
// checked against {name}. Returns NoChange() when lowering is not possible.
// NOTE(review): {language_mode} is currently unused in this overload —
// presumably kept for signature symmetry; confirm against later revisions.
Reduction JSNativeContextSpecialization::ReduceNamedAccess(
    Node* node, Node* value, MapHandleList const& receiver_maps,
    Handle<Name> name, AccessMode access_mode, LanguageMode language_mode,
    Node* index) {
  DCHECK(node->opcode() == IrOpcode::kJSLoadNamed ||
         node->opcode() == IrOpcode::kJSStoreNamed ||
         node->opcode() == IrOpcode::kJSLoadProperty ||
         node->opcode() == IrOpcode::kJSStoreProperty);
  Node* receiver = NodeProperties::GetValueInput(node, 0);
  Node* frame_state = NodeProperties::GetFrameStateInput(node, 1);
  Node* effect = NodeProperties::GetEffectInput(node);
  Node* control = NodeProperties::GetControlInput(node);

  // Not much we can do if deoptimization support is disabled.
  if (!(flags() & kDeoptimizationEnabled)) return NoChange();

  // Retrieve the native context from the given {node}.
  Handle<Context> native_context;
  if (!GetNativeContext(node).ToHandle(&native_context)) return NoChange();

  // Compute property access infos for the receiver maps.
  AccessInfoFactory access_info_factory(dependencies(), native_context,
                                        graph()->zone());
  ZoneVector<PropertyAccessInfo> access_infos(zone());
  if (!access_info_factory.ComputePropertyAccessInfos(
          receiver_maps, name, access_mode, &access_infos)) {
    return NoChange();
  }

  // Nothing to do if we have no non-deprecated maps.
  if (access_infos.empty()) return NoChange();

  // The final states for every polymorphic branch. We join them with
  // Merge+Phi+EffectPhi at the bottom.
  ZoneVector<Node*> values(zone());
  ZoneVector<Node*> effects(zone());
  ZoneVector<Node*> controls(zone());

  // Ensure that {index} matches the specified {name} (if {index} is given).
  if (index != nullptr) {
    Node* check = graph()->NewNode(simplified()->ReferenceEqual(Type::Name()),
                                   index, jsgraph()->HeapConstant(name));
    control = graph()->NewNode(common()->DeoptimizeUnless(), check, frame_state,
                               effect, control);
  }

  // Check if {receiver} may be a number; in that case the Smi path below
  // must be routed into the Number branch rather than deoptimizing.
  bool receiverissmi_possible = false;
  for (PropertyAccessInfo const& access_info : access_infos) {
    if (access_info.receiver_type()->Is(Type::Number())) {
      receiverissmi_possible = true;
      break;
    }
  }

  // Ensure that {receiver} is a heap object.
  Node* check = graph()->NewNode(simplified()->ObjectIsSmi(), receiver);
  Node* receiverissmi_control = nullptr;
  Node* receiverissmi_effect = effect;
  if (receiverissmi_possible) {
    // Keep the Smi case alive; it is merged into the Number branch later.
    Node* branch = graph()->NewNode(common()->Branch(), check, control);
    control = graph()->NewNode(common()->IfFalse(), branch);
    receiverissmi_control = graph()->NewNode(common()->IfTrue(), branch);
    receiverissmi_effect = effect;
  } else {
    // No Number access info: a Smi receiver means deoptimize.
    control = graph()->NewNode(common()->DeoptimizeIf(), check, frame_state,
                               effect, control);
  }

  // Load the {receiver} map. The resulting effect is the dominating effect for
  // all (polymorphic) branches.
  Node* receiver_map = effect =
      graph()->NewNode(simplified()->LoadField(AccessBuilder::ForMap()),
                       receiver, effect, control);

  // Generate code for the various different property access patterns.
  Node* fallthrough_control = control;
  for (size_t j = 0; j < access_infos.size(); ++j) {
    PropertyAccessInfo const& access_info = access_infos[j];
    Node* this_value = value;
    Node* this_receiver = receiver;
    Node* this_effect = effect;
    Node* this_control;

    // Perform map check on {receiver}.
    Type* receiver_type = access_info.receiver_type();
    if (receiver_type->Is(Type::String())) {
      // Emit an instance type check for strings.
      Node* receiver_instance_type = this_effect = graph()->NewNode(
          simplified()->LoadField(AccessBuilder::ForMapInstanceType()),
          receiver_map, this_effect, fallthrough_control);
      Node* check =
          graph()->NewNode(machine()->Uint32LessThan(), receiver_instance_type,
                           jsgraph()->Uint32Constant(FIRST_NONSTRING_TYPE));
      if (j == access_infos.size() - 1) {
        // Last branch: deoptimize instead of falling through.
        this_control =
            graph()->NewNode(common()->DeoptimizeUnless(), check, frame_state,
                             this_effect, fallthrough_control);
        fallthrough_control = nullptr;
      } else {
        Node* branch =
            graph()->NewNode(common()->Branch(), check, fallthrough_control);
        fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
        this_control = graph()->NewNode(common()->IfTrue(), branch);
      }
    } else {
      // Emit a (sequence of) map checks for other {receiver}s.
      ZoneVector<Node*> this_controls(zone());
      ZoneVector<Node*> this_effects(zone());
      int num_classes = access_info.receiver_type()->NumClasses();
      for (auto i = access_info.receiver_type()->Classes(); !i.Done();
           i.Advance()) {
        DCHECK_LT(0, num_classes);
        Handle<Map> map = i.Current();
        Node* check =
            graph()->NewNode(simplified()->ReferenceEqual(Type::Internal()),
                             receiver_map, jsgraph()->Constant(map));
        if (--num_classes == 0 && j == access_infos.size() - 1) {
          // Very last map check overall: conditional deoptimization exit.
          this_controls.push_back(
              graph()->NewNode(common()->DeoptimizeUnless(), check, frame_state,
                               this_effect, fallthrough_control));
          this_effects.push_back(this_effect);
          fallthrough_control = nullptr;
        } else {
          Node* branch =
              graph()->NewNode(common()->Branch(), check, fallthrough_control);
          fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
          this_controls.push_back(graph()->NewNode(common()->IfTrue(), branch));
          this_effects.push_back(this_effect);
        }
      }

      // The Number case requires special treatment to also deal with Smis.
      if (receiver_type->Is(Type::Number())) {
        // Join this check with the "receiver is smi" check above.
        DCHECK_NOT_NULL(receiverissmi_effect);
        DCHECK_NOT_NULL(receiverissmi_control);
        this_effects.push_back(receiverissmi_effect);
        this_controls.push_back(receiverissmi_control);
        receiverissmi_effect = receiverissmi_control = nullptr;
      }

      // Create dominating Merge+EffectPhi for this {receiver} type.
      int const this_control_count = static_cast<int>(this_controls.size());
      this_control =
          (this_control_count == 1)
              ? this_controls.front()
              : graph()->NewNode(common()->Merge(this_control_count),
                                 this_control_count, &this_controls.front());
      this_effects.push_back(this_control);
      int const this_effect_count = static_cast<int>(this_effects.size());
      this_effect =
          (this_control_count == 1)
              ? this_effects.front()
              : graph()->NewNode(common()->EffectPhi(this_control_count),
                                 this_effect_count, &this_effects.front());
    }

    // Determine actual holder and perform prototype chain checks.
    Handle<JSObject> holder;
    if (access_info.holder().ToHandle(&holder)) {
      AssumePrototypesStable(receiver_type, native_context, holder);
    }

    // Generate the actual property access.
    if (access_info.IsNotFound()) {
      // Property not found: loads yield undefined.
      DCHECK_EQ(AccessMode::kLoad, access_mode);
      this_value = jsgraph()->UndefinedConstant();
    } else if (access_info.IsDataConstant()) {
      this_value = jsgraph()->Constant(access_info.constant());
      if (access_mode == AccessMode::kStore) {
        // Stores to a constant field only succeed if the stored value
        // matches the constant; otherwise deoptimize.
        Node* check = graph()->NewNode(
            simplified()->ReferenceEqual(Type::Tagged()), value, this_value);
        this_control = graph()->NewNode(common()->DeoptimizeUnless(), check,
                                        frame_state, this_effect, this_control);
      }
    } else {
      DCHECK(access_info.IsDataField());
      FieldIndex const field_index = access_info.field_index();
      FieldCheck const field_check = access_info.field_check();
      Type* const field_type = access_info.field_type();
      switch (field_check) {
        case FieldCheck::kNone:
          break;
        case FieldCheck::kJSArrayBufferViewBufferNotNeutered: {
          // Deoptimize if the view's backing ArrayBuffer was neutered.
          Node* this_buffer = this_effect =
              graph()->NewNode(simplified()->LoadField(
                                   AccessBuilder::ForJSArrayBufferViewBuffer()),
                               this_receiver, this_effect, this_control);
          Node* this_buffer_bit_field = this_effect =
              graph()->NewNode(simplified()->LoadField(
                                   AccessBuilder::ForJSArrayBufferBitField()),
                               this_buffer, this_effect, this_control);
          Node* check = graph()->NewNode(
              machine()->Word32Equal(),
              graph()->NewNode(machine()->Word32And(), this_buffer_bit_field,
                               jsgraph()->Int32Constant(
                                   1 << JSArrayBuffer::WasNeutered::kShift)),
              jsgraph()->Int32Constant(0));
          this_control =
              graph()->NewNode(common()->DeoptimizeIf(), check, frame_state,
                               this_effect, this_control);
          break;
        }
      }
      if (access_mode == AccessMode::kLoad &&
          access_info.holder().ToHandle(&holder)) {
        // Load from the (constant) holder on the prototype chain.
        this_receiver = jsgraph()->Constant(holder);
      }
      Node* this_storage = this_receiver;
      if (!field_index.is_inobject()) {
        // Out-of-object fields live in the properties backing store.
        this_storage = this_effect = graph()->NewNode(
            simplified()->LoadField(AccessBuilder::ForJSObjectProperties()),
            this_storage, this_effect, this_control);
      }
      FieldAccess field_access = {kTaggedBase, field_index.offset(), name,
                                  field_type, MachineType::AnyTagged()};
      if (access_mode == AccessMode::kLoad) {
        if (field_type->Is(Type::UntaggedFloat64())) {
          if (!field_index.is_inobject() || field_index.is_hidden_field() ||
              !FLAG_unbox_double_fields) {
            // Boxed double: load the MutableHeapNumber first, then its value.
            this_storage = this_effect =
                graph()->NewNode(simplified()->LoadField(field_access),
                                 this_storage, this_effect, this_control);
            field_access.offset = HeapNumber::kValueOffset;
            field_access.name = MaybeHandle<Name>();
          }
          field_access.machine_type = MachineType::Float64();
        }
        this_value = this_effect =
            graph()->NewNode(simplified()->LoadField(field_access),
                             this_storage, this_effect, this_control);
      } else {
        DCHECK_EQ(AccessMode::kStore, access_mode);
        if (field_type->Is(Type::UntaggedFloat64())) {
          // The stored value must be a Number; deoptimize otherwise.
          Node* check =
              graph()->NewNode(simplified()->ObjectIsNumber(), this_value);
          this_control =
              graph()->NewNode(common()->DeoptimizeUnless(), check, frame_state,
                               this_effect, this_control);
          this_value = graph()->NewNode(common()->Guard(Type::Number()),
                                        this_value, this_control);

          if (!field_index.is_inobject() || field_index.is_hidden_field() ||
              !FLAG_unbox_double_fields) {
            if (access_info.HasTransitionMap()) {
              // Allocate a MutableHeapNumber for the new property.
              Callable callable =
                  CodeFactory::AllocateMutableHeapNumber(isolate());
              CallDescriptor* desc = Linkage::GetStubCallDescriptor(
                  isolate(), jsgraph()->zone(), callable.descriptor(), 0,
                  CallDescriptor::kNoFlags, Operator::kNoThrow);
              Node* this_box = this_effect = graph()->NewNode(
                  common()->Call(desc),
                  jsgraph()->HeapConstant(callable.code()),
                  jsgraph()->NoContextConstant(), this_effect, this_control);
              this_effect = graph()->NewNode(
                  simplified()->StoreField(AccessBuilder::ForHeapNumberValue()),
                  this_box, this_value, this_effect, this_control);
              this_value = this_box;

              field_access.type = Type::TaggedPointer();
            } else {
              // We just store directly to the MutableHeapNumber.
              this_storage = this_effect =
                  graph()->NewNode(simplified()->LoadField(field_access),
                                   this_storage, this_effect, this_control);
              field_access.offset = HeapNumber::kValueOffset;
              field_access.name = MaybeHandle<Name>();
              field_access.machine_type = MachineType::Float64();
            }
          } else {
            // Unboxed double field, we store directly to the field.
            field_access.machine_type = MachineType::Float64();
          }
        } else if (field_type->Is(Type::TaggedSigned())) {
          // Field only holds Smis; deoptimize on non-Smi values.
          Node* check =
              graph()->NewNode(simplified()->ObjectIsSmi(), this_value);
          this_control =
              graph()->NewNode(common()->DeoptimizeUnless(), check, frame_state,
                               this_effect, this_control);
          this_value = graph()->NewNode(common()->Guard(type_cache_.kSmi),
                                        this_value, this_control);
        } else if (field_type->Is(Type::TaggedPointer())) {
          // Field only holds heap objects; deoptimize on Smi values.
          Node* check =
              graph()->NewNode(simplified()->ObjectIsSmi(), this_value);
          this_control =
              graph()->NewNode(common()->DeoptimizeIf(), check, frame_state,
                               this_effect, this_control);
          if (field_type->NumClasses() == 1) {
            // Emit a map check for the value.
            Node* this_value_map = this_effect = graph()->NewNode(
                simplified()->LoadField(AccessBuilder::ForMap()), this_value,
                this_effect, this_control);
            Node* check = graph()->NewNode(
                simplified()->ReferenceEqual(Type::Internal()), this_value_map,
                jsgraph()->Constant(field_type->Classes().Current()));
            this_control =
                graph()->NewNode(common()->DeoptimizeUnless(), check,
                                 frame_state, this_effect, this_control);
          } else {
            DCHECK_EQ(0, field_type->NumClasses());
          }
        } else {
          DCHECK(field_type->Is(Type::Tagged()));
        }
        Handle<Map> transition_map;
        if (access_info.transition_map().ToHandle(&transition_map)) {
          // Transitioning store: map change + field store form one region.
          this_effect = graph()->NewNode(common()->BeginRegion(), this_effect);
          this_effect = graph()->NewNode(
              simplified()->StoreField(AccessBuilder::ForMap()), this_receiver,
              jsgraph()->Constant(transition_map), this_effect, this_control);
        }
        this_effect = graph()->NewNode(simplified()->StoreField(field_access),
                                       this_storage, this_value, this_effect,
                                       this_control);
        if (access_info.HasTransitionMap()) {
          this_effect =
              graph()->NewNode(common()->FinishRegion(),
                               jsgraph()->UndefinedConstant(), this_effect);
        }
      }
    }

    // Remember the final state for this property access.
    values.push_back(this_value);
    effects.push_back(this_effect);
    controls.push_back(this_control);
  }

  // The last branch always deoptimizes instead of falling through.
  DCHECK_NULL(fallthrough_control);

  // Generate the final merge point for all (polymorphic) branches.
  int const control_count = static_cast<int>(controls.size());
  if (control_count == 0) {
    value = effect = control = jsgraph()->Dead();
  } else if (control_count == 1) {
    value = values.front();
    effect = effects.front();
    control = controls.front();
  } else {
    control = graph()->NewNode(common()->Merge(control_count), control_count,
                               &controls.front());
    values.push_back(control);
    value = graph()->NewNode(
        common()->Phi(MachineRepresentation::kTagged, control_count),
        control_count + 1, &values.front());
    effects.push_back(control);
    effect = graph()->NewNode(common()->EffectPhi(control_count),
                              control_count + 1, &effects.front());
  }
  ReplaceWithValue(node, value, effect, control);
  return Replace(value);
}
Ben Murdoch097c5b22016-05-18 11:27:45 +0100429Reduction JSNativeContextSpecialization::ReduceNamedAccess(
430 Node* node, Node* value, FeedbackNexus const& nexus, Handle<Name> name,
431 AccessMode access_mode, LanguageMode language_mode) {
432 DCHECK(node->opcode() == IrOpcode::kJSLoadNamed ||
433 node->opcode() == IrOpcode::kJSStoreNamed);
434
435 // Check if the {nexus} reports type feedback for the IC.
436 if (nexus.IsUninitialized()) {
437 if ((flags() & kDeoptimizationEnabled) &&
438 (flags() & kBailoutOnUninitialized)) {
439 // TODO(turbofan): Implement all eager bailout points correctly in
440 // the graph builder.
441 Node* frame_state = NodeProperties::GetFrameStateInput(node, 1);
442 if (!OpParameter<FrameStateInfo>(frame_state).bailout_id().IsNone()) {
443 return ReduceSoftDeoptimize(node);
444 }
445 }
446 return NoChange();
447 }
448
449 // Extract receiver maps from the IC using the {nexus}.
450 MapHandleList receiver_maps;
451 if (nexus.ExtractMaps(&receiver_maps) == 0) return NoChange();
452 DCHECK_LT(0, receiver_maps.length());
453
454 // Try to lower the named access based on the {receiver_maps}.
455 return ReduceNamedAccess(node, value, receiver_maps, name, access_mode,
456 language_mode);
457}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000460Reduction JSNativeContextSpecialization::ReduceJSLoadNamed(Node* node) {
461 DCHECK_EQ(IrOpcode::kJSLoadNamed, node->opcode());
462 NamedAccess const& p = NamedAccessOf(node->op());
463 Node* const value = jsgraph()->Dead();
464
465 // Extract receiver maps from the LOAD_IC using the LoadICNexus.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000466 if (!p.feedback().IsValid()) return NoChange();
467 LoadICNexus nexus(p.feedback().vector(), p.feedback().slot());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000468
469 // Try to lower the named access based on the {receiver_maps}.
Ben Murdoch097c5b22016-05-18 11:27:45 +0100470 return ReduceNamedAccess(node, value, nexus, p.name(), AccessMode::kLoad,
471 p.language_mode());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000472}
475Reduction JSNativeContextSpecialization::ReduceJSStoreNamed(Node* node) {
476 DCHECK_EQ(IrOpcode::kJSStoreNamed, node->opcode());
477 NamedAccess const& p = NamedAccessOf(node->op());
478 Node* const value = NodeProperties::GetValueInput(node, 1);
479
480 // Extract receiver maps from the STORE_IC using the StoreICNexus.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000481 if (!p.feedback().IsValid()) return NoChange();
482 StoreICNexus nexus(p.feedback().vector(), p.feedback().slot());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000483
484 // Try to lower the named access based on the {receiver_maps}.
Ben Murdoch097c5b22016-05-18 11:27:45 +0100485 return ReduceNamedAccess(node, value, nexus, p.name(), AccessMode::kStore,
486 p.language_mode());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000487}
490Reduction JSNativeContextSpecialization::ReduceElementAccess(
491 Node* node, Node* index, Node* value, MapHandleList const& receiver_maps,
492 AccessMode access_mode, LanguageMode language_mode,
493 KeyedAccessStoreMode store_mode) {
494 DCHECK(node->opcode() == IrOpcode::kJSLoadProperty ||
495 node->opcode() == IrOpcode::kJSStoreProperty);
496 Node* receiver = NodeProperties::GetValueInput(node, 0);
497 Node* context = NodeProperties::GetContextInput(node);
498 Node* frame_state = NodeProperties::GetFrameStateInput(node, 1);
499 Node* effect = NodeProperties::GetEffectInput(node);
500 Node* control = NodeProperties::GetControlInput(node);
501
502 // Not much we can do if deoptimization support is disabled.
503 if (!(flags() & kDeoptimizationEnabled)) return NoChange();
504
505 // TODO(bmeurer): Add support for non-standard stores.
506 if (store_mode != STANDARD_STORE) return NoChange();
507
508 // Retrieve the native context from the given {node}.
509 Handle<Context> native_context;
510 if (!GetNativeContext(node).ToHandle(&native_context)) return NoChange();
511
512 // Compute element access infos for the receiver maps.
513 AccessInfoFactory access_info_factory(dependencies(), native_context,
514 graph()->zone());
515 ZoneVector<ElementAccessInfo> access_infos(zone());
516 if (!access_info_factory.ComputeElementAccessInfos(receiver_maps, access_mode,
517 &access_infos)) {
518 return NoChange();
519 }
520
521 // Nothing to do if we have no non-deprecated maps.
522 if (access_infos.empty()) return NoChange();
523
524 // The final states for every polymorphic branch. We join them with
525 // Merge+Phi+EffectPhi at the bottom.
526 ZoneVector<Node*> values(zone());
527 ZoneVector<Node*> effects(zone());
528 ZoneVector<Node*> controls(zone());
529
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000530 // Ensure that {receiver} is a heap object.
531 Node* check = graph()->NewNode(simplified()->ObjectIsSmi(), receiver);
Ben Murdochda12d292016-06-02 14:46:10 +0100532 control = graph()->NewNode(common()->DeoptimizeIf(), check, frame_state,
533 effect, control);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000534
535 // Load the {receiver} map. The resulting effect is the dominating effect for
536 // all (polymorphic) branches.
537 Node* receiver_map = effect =
538 graph()->NewNode(simplified()->LoadField(AccessBuilder::ForMap()),
539 receiver, effect, control);
540
541 // Generate code for the various different element access patterns.
542 Node* fallthrough_control = control;
Ben Murdochda12d292016-06-02 14:46:10 +0100543 for (size_t j = 0; j < access_infos.size(); ++j) {
544 ElementAccessInfo const& access_info = access_infos[j];
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000545 Node* this_receiver = receiver;
546 Node* this_value = value;
547 Node* this_index = index;
548 Node* this_effect;
549 Node* this_control;
550
551 // Perform map check on {receiver}.
552 Type* receiver_type = access_info.receiver_type();
553 bool receiver_is_jsarray = true;
554 {
555 ZoneVector<Node*> this_controls(zone());
556 ZoneVector<Node*> this_effects(zone());
Ben Murdochda12d292016-06-02 14:46:10 +0100557 size_t num_transitions = access_info.transitions().size();
558 int num_classes = access_info.receiver_type()->NumClasses();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000559 for (auto i = access_info.receiver_type()->Classes(); !i.Done();
560 i.Advance()) {
Ben Murdochda12d292016-06-02 14:46:10 +0100561 DCHECK_LT(0, num_classes);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000562 Handle<Map> map = i.Current();
563 Node* check =
564 graph()->NewNode(simplified()->ReferenceEqual(Type::Any()),
565 receiver_map, jsgraph()->Constant(map));
Ben Murdochda12d292016-06-02 14:46:10 +0100566 if (--num_classes == 0 && num_transitions == 0 &&
567 j == access_infos.size() - 1) {
568 // Last map check on the fallthrough control path, do a conditional
569 // eager deoptimization exit here.
570 // TODO(turbofan): This is ugly as hell! We should probably introduce
571 // macro-ish operators for property access that encapsulate this whole
572 // mess.
573 this_controls.push_back(graph()->NewNode(common()->DeoptimizeUnless(),
574 check, frame_state, effect,
575 fallthrough_control));
576 fallthrough_control = nullptr;
577 } else {
578 Node* branch =
579 graph()->NewNode(common()->Branch(), check, fallthrough_control);
580 this_controls.push_back(graph()->NewNode(common()->IfTrue(), branch));
581 fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
582 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000583 this_effects.push_back(effect);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000584 if (!map->IsJSArrayMap()) receiver_is_jsarray = false;
585 }
586
587 // Generate possible elements kind transitions.
588 for (auto transition : access_info.transitions()) {
Ben Murdochda12d292016-06-02 14:46:10 +0100589 DCHECK_LT(0u, num_transitions);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000590 Handle<Map> transition_source = transition.first;
591 Handle<Map> transition_target = transition.second;
Ben Murdochda12d292016-06-02 14:46:10 +0100592 Node* transition_control;
593 Node* transition_effect = effect;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000594
595 // Check if {receiver} has the specified {transition_source} map.
596 Node* check = graph()->NewNode(
597 simplified()->ReferenceEqual(Type::Any()), receiver_map,
598 jsgraph()->HeapConstant(transition_source));
Ben Murdochda12d292016-06-02 14:46:10 +0100599 if (--num_transitions == 0 && j == access_infos.size() - 1) {
600 transition_control =
601 graph()->NewNode(common()->DeoptimizeUnless(), check, frame_state,
602 transition_effect, fallthrough_control);
603 fallthrough_control = nullptr;
604 } else {
605 Node* branch =
606 graph()->NewNode(common()->Branch(), check, fallthrough_control);
607 fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
608 transition_control = graph()->NewNode(common()->IfTrue(), branch);
609 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000610
611 // Migrate {receiver} from {transition_source} to {transition_target}.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000612 if (IsSimpleMapChangeTransition(transition_source->elements_kind(),
613 transition_target->elements_kind())) {
614 // In-place migration, just store the {transition_target} map.
615 transition_effect = graph()->NewNode(
616 simplified()->StoreField(AccessBuilder::ForMap()), receiver,
617 jsgraph()->HeapConstant(transition_target), transition_effect,
618 transition_control);
619 } else {
620 // Instance migration, let the stub deal with the {receiver}.
621 TransitionElementsKindStub stub(isolate(),
622 transition_source->elements_kind(),
623 transition_target->elements_kind(),
624 transition_source->IsJSArrayMap());
625 CallDescriptor const* const desc = Linkage::GetStubCallDescriptor(
626 isolate(), graph()->zone(), stub.GetCallInterfaceDescriptor(), 0,
627 CallDescriptor::kNeedsFrameState, node->op()->properties());
628 transition_effect = graph()->NewNode(
629 common()->Call(desc), jsgraph()->HeapConstant(stub.GetCode()),
630 receiver, jsgraph()->HeapConstant(transition_target), context,
631 frame_state, transition_effect, transition_control);
632 }
633 this_controls.push_back(transition_control);
634 this_effects.push_back(transition_effect);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000635 }
636
637 // Create single chokepoint for the control.
638 int const this_control_count = static_cast<int>(this_controls.size());
639 if (this_control_count == 1) {
640 this_control = this_controls.front();
641 this_effect = this_effects.front();
642 } else {
643 this_control =
644 graph()->NewNode(common()->Merge(this_control_count),
645 this_control_count, &this_controls.front());
646 this_effects.push_back(this_control);
647 this_effect =
648 graph()->NewNode(common()->EffectPhi(this_control_count),
649 this_control_count + 1, &this_effects.front());
650 }
651 }
652
653 // Certain stores need a prototype chain check because shape changes
654 // could allow callbacks on elements in the prototype chain that are
655 // not compatible with (monomorphic) keyed stores.
656 Handle<JSObject> holder;
657 if (access_info.holder().ToHandle(&holder)) {
658 AssumePrototypesStable(receiver_type, native_context, holder);
659 }
660
661 // Check that the {index} is actually a Number.
662 if (!NumberMatcher(this_index).HasValue()) {
663 Node* check =
664 graph()->NewNode(simplified()->ObjectIsNumber(), this_index);
Ben Murdochda12d292016-06-02 14:46:10 +0100665 this_control = graph()->NewNode(common()->DeoptimizeUnless(), check,
666 frame_state, this_effect, this_control);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000667 this_index = graph()->NewNode(common()->Guard(Type::Number()), this_index,
668 this_control);
669 }
670
671 // Convert the {index} to an unsigned32 value and check if the result is
672 // equal to the original {index}.
673 if (!NumberMatcher(this_index).IsInRange(0.0, kMaxUInt32)) {
674 Node* this_index32 =
675 graph()->NewNode(simplified()->NumberToUint32(), this_index);
676 Node* check = graph()->NewNode(simplified()->NumberEqual(), this_index32,
677 this_index);
Ben Murdochda12d292016-06-02 14:46:10 +0100678 this_control = graph()->NewNode(common()->DeoptimizeUnless(), check,
679 frame_state, this_effect, this_control);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000680 this_index = this_index32;
681 }
682
683 // TODO(bmeurer): We currently specialize based on elements kind. We should
684 // also be able to properly support strings and other JSObjects here.
685 ElementsKind elements_kind = access_info.elements_kind();
686
687 // Load the elements for the {receiver}.
688 Node* this_elements = this_effect = graph()->NewNode(
689 simplified()->LoadField(AccessBuilder::ForJSObjectElements()),
690 this_receiver, this_effect, this_control);
691
692 // Don't try to store to a copy-on-write backing store.
693 if (access_mode == AccessMode::kStore &&
694 IsFastSmiOrObjectElementsKind(elements_kind)) {
695 Node* this_elements_map = this_effect =
696 graph()->NewNode(simplified()->LoadField(AccessBuilder::ForMap()),
697 this_elements, this_effect, this_control);
Ben Murdochda12d292016-06-02 14:46:10 +0100698 Node* check = graph()->NewNode(
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000699 simplified()->ReferenceEqual(Type::Any()), this_elements_map,
700 jsgraph()->HeapConstant(factory()->fixed_array_map()));
Ben Murdochda12d292016-06-02 14:46:10 +0100701 this_control = graph()->NewNode(common()->DeoptimizeUnless(), check,
702 frame_state, this_effect, this_control);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000703 }
704
705 // Load the length of the {receiver}.
706 Node* this_length = this_effect =
707 receiver_is_jsarray
708 ? graph()->NewNode(
709 simplified()->LoadField(
710 AccessBuilder::ForJSArrayLength(elements_kind)),
711 this_receiver, this_effect, this_control)
712 : graph()->NewNode(
713 simplified()->LoadField(AccessBuilder::ForFixedArrayLength()),
714 this_elements, this_effect, this_control);
715
716 // Check that the {index} is in the valid range for the {receiver}.
717 Node* check = graph()->NewNode(simplified()->NumberLessThan(), this_index,
718 this_length);
Ben Murdochda12d292016-06-02 14:46:10 +0100719 this_control = graph()->NewNode(common()->DeoptimizeUnless(), check,
720 frame_state, this_effect, this_control);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000721
722 // Compute the element access.
723 Type* element_type = Type::Any();
724 MachineType element_machine_type = MachineType::AnyTagged();
725 if (IsFastDoubleElementsKind(elements_kind)) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100726 element_type = Type::Number();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000727 element_machine_type = MachineType::Float64();
728 } else if (IsFastSmiElementsKind(elements_kind)) {
729 element_type = type_cache_.kSmi;
730 }
731 ElementAccess element_access = {kTaggedBase, FixedArray::kHeaderSize,
732 element_type, element_machine_type};
733
734 // Access the actual element.
735 // TODO(bmeurer): Refactor this into separate methods or even a separate
736 // class that deals with the elements access.
737 if (access_mode == AccessMode::kLoad) {
738 // Compute the real element access type, which includes the hole in case
739 // of holey backing stores.
740 if (elements_kind == FAST_HOLEY_ELEMENTS ||
741 elements_kind == FAST_HOLEY_SMI_ELEMENTS) {
742 element_access.type = Type::Union(
743 element_type,
744 Type::Constant(factory()->the_hole_value(), graph()->zone()),
745 graph()->zone());
746 }
747 // Perform the actual backing store access.
748 this_value = this_effect = graph()->NewNode(
749 simplified()->LoadElement(element_access), this_elements, this_index,
750 this_effect, this_control);
751 // Handle loading from holey backing stores correctly, by either mapping
752 // the hole to undefined if possible, or deoptimizing otherwise.
753 if (elements_kind == FAST_HOLEY_ELEMENTS ||
754 elements_kind == FAST_HOLEY_SMI_ELEMENTS) {
755 // Perform the hole check on the result.
756 Node* check =
757 graph()->NewNode(simplified()->ReferenceEqual(element_access.type),
758 this_value, jsgraph()->TheHoleConstant());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000759 // Check if we are allowed to turn the hole into undefined.
760 Type* initial_holey_array_type = Type::Class(
761 handle(isolate()->get_initial_js_array_map(elements_kind)),
762 graph()->zone());
763 if (receiver_type->NowIs(initial_holey_array_type) &&
764 isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
Ben Murdochda12d292016-06-02 14:46:10 +0100765 Node* branch = graph()->NewNode(common()->Branch(BranchHint::kFalse),
766 check, this_control);
767 Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
768 Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000769 // Add a code dependency on the array protector cell.
770 AssumePrototypesStable(receiver_type, native_context,
771 isolate()->initial_object_prototype());
772 dependencies()->AssumePropertyCell(factory()->array_protector());
773 // Turn the hole into undefined.
774 this_control =
775 graph()->NewNode(common()->Merge(2), if_true, if_false);
776 this_value = graph()->NewNode(
777 common()->Phi(MachineRepresentation::kTagged, 2),
778 jsgraph()->UndefinedConstant(), this_value, this_control);
779 element_type =
780 Type::Union(element_type, Type::Undefined(), graph()->zone());
781 } else {
782 // Deoptimize in case of the hole.
Ben Murdochda12d292016-06-02 14:46:10 +0100783 this_control =
784 graph()->NewNode(common()->DeoptimizeIf(), check, frame_state,
785 this_effect, this_control);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000786 }
787 // Rename the result to represent the actual type (not polluted by the
788 // hole).
789 this_value = graph()->NewNode(common()->Guard(element_type), this_value,
790 this_control);
791 } else if (elements_kind == FAST_HOLEY_DOUBLE_ELEMENTS) {
792 // Perform the hole check on the result.
793 Node* check =
794 graph()->NewNode(simplified()->NumberIsHoleNaN(), this_value);
795 // Check if we are allowed to return the hole directly.
796 Type* initial_holey_array_type = Type::Class(
797 handle(isolate()->get_initial_js_array_map(elements_kind)),
798 graph()->zone());
799 if (receiver_type->NowIs(initial_holey_array_type) &&
800 isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
801 // Add a code dependency on the array protector cell.
802 AssumePrototypesStable(receiver_type, native_context,
803 isolate()->initial_object_prototype());
804 dependencies()->AssumePropertyCell(factory()->array_protector());
805 // Turn the hole into undefined.
806 this_value = graph()->NewNode(
807 common()->Select(MachineRepresentation::kTagged,
808 BranchHint::kFalse),
809 check, jsgraph()->UndefinedConstant(), this_value);
810 } else {
811 // Deoptimize in case of the hole.
Ben Murdochda12d292016-06-02 14:46:10 +0100812 this_control =
813 graph()->NewNode(common()->DeoptimizeIf(), check, frame_state,
814 this_effect, this_control);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000815 }
816 }
817 } else {
818 DCHECK_EQ(AccessMode::kStore, access_mode);
819 if (IsFastSmiElementsKind(elements_kind)) {
820 Node* check = graph()->NewNode(simplified()->ObjectIsSmi(), this_value);
Ben Murdochda12d292016-06-02 14:46:10 +0100821 this_control = graph()->NewNode(common()->DeoptimizeUnless(), check,
822 frame_state, this_effect, this_control);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000823 this_value = graph()->NewNode(common()->Guard(type_cache_.kSmi),
824 this_value, this_control);
825 } else if (IsFastDoubleElementsKind(elements_kind)) {
826 Node* check =
827 graph()->NewNode(simplified()->ObjectIsNumber(), this_value);
Ben Murdochda12d292016-06-02 14:46:10 +0100828 this_control = graph()->NewNode(common()->DeoptimizeUnless(), check,
829 frame_state, this_effect, this_control);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000830 this_value = graph()->NewNode(common()->Guard(Type::Number()),
831 this_value, this_control);
832 }
833 this_effect = graph()->NewNode(simplified()->StoreElement(element_access),
834 this_elements, this_index, this_value,
835 this_effect, this_control);
836 }
837
838 // Remember the final state for this element access.
839 values.push_back(this_value);
840 effects.push_back(this_effect);
841 controls.push_back(this_control);
842 }
843
Ben Murdochda12d292016-06-02 14:46:10 +0100844 DCHECK_NULL(fallthrough_control);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000845
846 // Generate the final merge point for all (polymorphic) branches.
847 int const control_count = static_cast<int>(controls.size());
848 if (control_count == 0) {
849 value = effect = control = jsgraph()->Dead();
850 } else if (control_count == 1) {
851 value = values.front();
852 effect = effects.front();
853 control = controls.front();
854 } else {
855 control = graph()->NewNode(common()->Merge(control_count), control_count,
856 &controls.front());
857 values.push_back(control);
858 value = graph()->NewNode(
859 common()->Phi(MachineRepresentation::kTagged, control_count),
860 control_count + 1, &values.front());
861 effects.push_back(control);
862 effect = graph()->NewNode(common()->EffectPhi(control_count),
863 control_count + 1, &effects.front());
864 }
865 ReplaceWithValue(node, value, effect, control);
866 return Replace(value);
867}
868
869
870Reduction JSNativeContextSpecialization::ReduceKeyedAccess(
871 Node* node, Node* index, Node* value, FeedbackNexus const& nexus,
872 AccessMode access_mode, LanguageMode language_mode,
873 KeyedAccessStoreMode store_mode) {
874 DCHECK(node->opcode() == IrOpcode::kJSLoadProperty ||
875 node->opcode() == IrOpcode::kJSStoreProperty);
876
Ben Murdoch097c5b22016-05-18 11:27:45 +0100877 // Check if the {nexus} reports type feedback for the IC.
878 if (nexus.IsUninitialized()) {
879 if ((flags() & kDeoptimizationEnabled) &&
880 (flags() & kBailoutOnUninitialized)) {
881 // TODO(turbofan): Implement all eager bailout points correctly in
882 // the graph builder.
883 Node* frame_state = NodeProperties::GetFrameStateInput(node, 1);
884 if (!OpParameter<FrameStateInfo>(frame_state).bailout_id().IsNone()) {
885 return ReduceSoftDeoptimize(node);
886 }
887 }
888 return NoChange();
889 }
890
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000891 // Extract receiver maps from the {nexus}.
892 MapHandleList receiver_maps;
893 if (nexus.ExtractMaps(&receiver_maps) == 0) return NoChange();
894 DCHECK_LT(0, receiver_maps.length());
895
896 // Optimize access for constant {index}.
897 HeapObjectMatcher mindex(index);
898 if (mindex.HasValue() && mindex.Value()->IsPrimitive()) {
899 // Keyed access requires a ToPropertyKey on the {index} first before
900 // looking up the property on the object (see ES6 section 12.3.2.1).
901 // We can only do this for non-observable ToPropertyKey invocations,
902 // so we limit the constant indices to primitives at this point.
903 Handle<Name> name;
904 if (Object::ToName(isolate(), mindex.Value()).ToHandle(&name)) {
905 uint32_t array_index;
906 if (name->AsArrayIndex(&array_index)) {
907 // Use the constant array index.
908 index = jsgraph()->Constant(static_cast<double>(array_index));
909 } else {
910 name = factory()->InternalizeName(name);
911 return ReduceNamedAccess(node, value, receiver_maps, name, access_mode,
912 language_mode);
913 }
914 }
915 }
916
917 // Check if we have feedback for a named access.
918 if (Name* name = nexus.FindFirstName()) {
919 return ReduceNamedAccess(node, value, receiver_maps,
920 handle(name, isolate()), access_mode,
921 language_mode, index);
922 }
923
924 // Try to lower the element access based on the {receiver_maps}.
925 return ReduceElementAccess(node, index, value, receiver_maps, access_mode,
926 language_mode, store_mode);
927}
928
929
Ben Murdoch097c5b22016-05-18 11:27:45 +0100930Reduction JSNativeContextSpecialization::ReduceSoftDeoptimize(Node* node) {
931 Node* frame_state = NodeProperties::GetFrameStateInput(node, 1);
932 Node* effect = NodeProperties::GetEffectInput(node);
933 Node* control = NodeProperties::GetControlInput(node);
934 Node* deoptimize =
935 graph()->NewNode(common()->Deoptimize(DeoptimizeKind::kSoft), frame_state,
936 effect, control);
937 // TODO(bmeurer): This should be on the AdvancedReducer somehow.
938 NodeProperties::MergeControlToEnd(graph(), common(), deoptimize);
939 Revisit(graph()->end());
940 node->TrimInputCount(0);
941 NodeProperties::ChangeOp(node, common()->Dead());
942 return Changed(node);
943}
944
945
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000946Reduction JSNativeContextSpecialization::ReduceJSLoadProperty(Node* node) {
947 DCHECK_EQ(IrOpcode::kJSLoadProperty, node->opcode());
948 PropertyAccess const& p = PropertyAccessOf(node->op());
949 Node* const index = NodeProperties::GetValueInput(node, 1);
950 Node* const value = jsgraph()->Dead();
951
952 // Extract receiver maps from the KEYED_LOAD_IC using the KeyedLoadICNexus.
953 if (!p.feedback().IsValid()) return NoChange();
954 KeyedLoadICNexus nexus(p.feedback().vector(), p.feedback().slot());
955
956 // Try to lower the keyed access based on the {nexus}.
957 return ReduceKeyedAccess(node, index, value, nexus, AccessMode::kLoad,
958 p.language_mode(), STANDARD_STORE);
959}
960
961
962Reduction JSNativeContextSpecialization::ReduceJSStoreProperty(Node* node) {
963 DCHECK_EQ(IrOpcode::kJSStoreProperty, node->opcode());
964 PropertyAccess const& p = PropertyAccessOf(node->op());
965 Node* const index = NodeProperties::GetValueInput(node, 1);
966 Node* const value = NodeProperties::GetValueInput(node, 2);
967
968 // Extract receiver maps from the KEYED_STORE_IC using the KeyedStoreICNexus.
969 if (!p.feedback().IsValid()) return NoChange();
970 KeyedStoreICNexus nexus(p.feedback().vector(), p.feedback().slot());
971
972 // Extract the keyed access store mode from the KEYED_STORE_IC.
973 KeyedAccessStoreMode store_mode = nexus.GetKeyedAccessStoreMode();
974
975 // Try to lower the keyed access based on the {nexus}.
976 return ReduceKeyedAccess(node, index, value, nexus, AccessMode::kStore,
977 p.language_mode(), store_mode);
978}
979
980
981void JSNativeContextSpecialization::AssumePrototypesStable(
982 Type* receiver_type, Handle<Context> native_context,
983 Handle<JSObject> holder) {
984 // Determine actual holder and perform prototype chain checks.
985 for (auto i = receiver_type->Classes(); !i.Done(); i.Advance()) {
986 Handle<Map> map = i.Current();
987 // Perform the implicit ToObject for primitives here.
988 // Implemented according to ES6 section 7.3.2 GetV (V, P).
989 Handle<JSFunction> constructor;
990 if (Map::GetConstructorFunction(map, native_context)
991 .ToHandle(&constructor)) {
992 map = handle(constructor->initial_map(), isolate());
993 }
994 dependencies()->AssumePrototypeMapsStable(map, holder);
995 }
996}
997
998
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000999MaybeHandle<Context> JSNativeContextSpecialization::GetNativeContext(
1000 Node* node) {
1001 Node* const context = NodeProperties::GetContextInput(node);
1002 return NodeProperties::GetSpecializationNativeContext(context,
1003 native_context());
1004}
1005
1006
1007Graph* JSNativeContextSpecialization::graph() const {
1008 return jsgraph()->graph();
1009}
1010
1011
1012Isolate* JSNativeContextSpecialization::isolate() const {
1013 return jsgraph()->isolate();
1014}
1015
1016
1017Factory* JSNativeContextSpecialization::factory() const {
1018 return isolate()->factory();
1019}
1020
1021
1022MachineOperatorBuilder* JSNativeContextSpecialization::machine() const {
1023 return jsgraph()->machine();
1024}
1025
1026
1027CommonOperatorBuilder* JSNativeContextSpecialization::common() const {
1028 return jsgraph()->common();
1029}
1030
1031
1032JSOperatorBuilder* JSNativeContextSpecialization::javascript() const {
1033 return jsgraph()->javascript();
1034}
1035
1036
1037SimplifiedOperatorBuilder* JSNativeContextSpecialization::simplified() const {
1038 return jsgraph()->simplified();
1039}
1040
1041} // namespace compiler
1042} // namespace internal
1043} // namespace v8